++ cleanup

Nico Schottelius 2018-03-30 14:31:55 +02:00
parent 406bd43f4a
commit e429c7adda
20 changed files with 5 additions and 844 deletions

ancient/README.md Normal file
@@ -0,0 +1,5 @@
Only kept for historic reasons:
- use at your own risk
- use for inspiration
- but don't use for production

@@ -1,571 +0,0 @@
#!/usr/bin/perl -wT
#
# TWiki Collaboration Platform, http://TWiki.org/
#
# Copyright (C) 2000-2003 Peter Thoeny, peter@thoeny.com
# Copyright (C) 2002 Richard Donkin, rdonkin@bigfoot.com
#
# For licensing info read license.txt file in the TWiki root.
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details, published at
# http://www.gnu.org/copyleft/gpl.html
#
# DESCRIPTION
# This script updates the usage statistics of each TWiki web.
# It reads the current month's log file and updates the table
# in the WebStatistics topic of each web.
# The script should be called by a cron job; calling it once
# a day is recommended.
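#
# A minimal sketch of a matching crontab entry (the install path is an
# assumption; adjust it to the local TWiki bin directory):
#     0 2 * * * cd /path/to/twiki/bin && ./statistics >/dev/null 2>&1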
# Set library paths in @INC, at compile time
BEGIN { unshift @INC, '.'; require 'setlib.cfg'; }
# 'Use locale' for internationalisation of Perl regexes -
# main locale settings are done in TWiki::setupLocale
BEGIN {
# Do a dynamic 'use locale' for this module
if( $TWiki::useLocale ) {
require locale;
import locale ();
}
}
use CGI::Carp qw(fatalsToBrowser);
use CGI;
use File::Copy qw(copy);
use IO::File;
use TWiki;
use strict;
# use diagnostics; # Debug only
#open(STDERR,'>&STDOUT'); # redirect error to browser
# FIXME: Beware global $| in mod_perl! Should use 'local'
$| = 1; # no buffering
&main();
# =========================
sub main
{
# initialize variables
my $cgiQuery = undef;
my $tmp = "";
my $theTopic = "";
my $thePathInfo = "";
my $theRemoteUser = "";
my $logDate = "";
# determine at runtime if script is called by browser or cron job
if( $ENV{'DOCUMENT_ROOT'} ) {
# script is called by browser
$cgiQuery = new CGI;
$thePathInfo = $cgiQuery->path_info() || "";
$theRemoteUser = $cgiQuery->remote_user() || "";
$theTopic = $cgiQuery->param( 'topic' ) || "";
$tmp = $cgiQuery->param( 'logdate' ) || "";
$tmp =~ s/[^0-9]//g; # remove all non numerals
if( $tmp ne "" ) {
$logDate = "$tmp";
}
TWiki::writeHeader( $cgiQuery );
print "<html>\n<head>\n<title>TWiki: Create Usage Statistics</title>\n";
print "</head>\n<body>\n";
} else {
# script is called by cron job
}
# Set up locale and regexes
&TWiki::basicInitialize();
# Initial messages
printMsg( "TWiki: Create Usage Statistics", $cgiQuery );
if( $cgiQuery ) {
print "<h4><font color=\"red\">Do not interrupt this script!</font> ( Please wait until page download has finished )</h4>\n";
}
if ( $theRemoteUser ) {
$tmp = &TWiki::userToWikiName( $theRemoteUser );
$tmp =~ s/^Main\.//;
printMsg( "* Executed by $tmp", $cgiQuery );
} else {
printMsg( "* Executed by a guest or a cron job scheduler", $cgiQuery );
}
if( ! $logDate ) {
# get current local time and format it as "yyyymm":
my ( $sec, $min, $hour, $mday, $mon, $year) = localtime( time() );
$year = sprintf("%.4u", $year + 1900); # Y2K fix
$mon = $mon+1;
$logDate = sprintf("%.4u%.2u", $year, $mon);
}
my $logMonth;
my $logYear;
$tmp = $logDate;
$tmp =~ s/([0-9]{4})(.*)/$2/g;
if( $tmp && $tmp < 13 ) {
$logMonth = $TWiki::isoMonth[$tmp-1];
} else {
$logMonth = "Date error";
}
$logYear = $logDate;
$logYear =~ s/([0-9]{4})(.*)/$1/g;
my $logMonthYear = "$logMonth $logYear";
printMsg( "* Statistics for $logMonthYear", $cgiQuery );
my $logFile = $TWiki::logFilename;
$logFile =~ s/%DATE%/$logDate/g;
if( -e $logFile ) {
# Copy the log file to temp file, since analysis could take some time
# FIXME move the temp dir stuff to TWiki.cfg
my $tmpDir;
if ( $TWiki::OS eq "UNIX" ) {
$tmpDir = $ENV{'TEMP'} || "/tmp";
} elsif ( $TWiki::OS eq "WINDOWS" ) {
$tmpDir = $ENV{'TEMP'} || "c:/";
} else {
# FIXME handle other OSs properly - assume Unix for now.
$tmpDir = "/tmp";
}
my $randNo = int ( rand 1000); # For mod_perl with threading...
my $tmpFilename = "$tmpDir/twiki-stats.$$.$randNo";
$tmpFilename =~ /(.*)/; $tmpFilename = $1; # Untaint
File::Copy::copy ($logFile, $tmpFilename)
or die "Can't copy $logFile to $tmpFilename - $!";
# Open the temp file
my $TMPFILE = new IO::File;
open $TMPFILE, $tmpFilename
or die "Can't open $tmpFilename - $!";
# Do a single data collection pass on the temporary copy of logfile,
# then call processWeb once for each web of interest.
my ($viewRef, $contribRef, $statViewsRef, $statSavesRef,
$statUploadsRef) = collectLogData( $TMPFILE, $logMonthYear );
=pod
# DEBUG ONLY
debugPrintHash($viewRef);
debugPrintHash($contribRef);
print "statViews tests===========\n";
print "Views in Main = " . ${$statViewsRef}{'Main'} . "\n";
print "hash stats (used/avail) = " . %{$statViewsRef}."\n";
foreach my $web (keys %{$statViewsRef}) {
print "Web summary for $web\n";
print $statViewsRef->{$web}."\n";
print $statSavesRef->{$web}."\n";
print $statUploadsRef->{$web}."\n";
}
=cut
# Generate WebStatistics topic update for one or more webs
if ( $thePathInfo ) {
# do a particular web:
processWeb( $thePathInfo, $theRemoteUser, $theTopic, $logMonthYear, $viewRef, $contribRef,
$statViewsRef, $statSavesRef, $statUploadsRef, $cgiQuery );
} else {
# do all webs:
my $dataDir = &TWiki::getDataDir();
my @weblist = ();
if( opendir( DIR, "$dataDir" ) ) {
@weblist = grep /^[^\.\_]/, readdir DIR; # exclude webs starting with . or _
closedir DIR;
} else {
printMsg( " *** Error: opendir $dataDir, $!", $cgiQuery );
}
foreach my $web ( @weblist ) {
if( -d "$dataDir/$web" ) {
processWeb( "/$web", $theRemoteUser, $theTopic, $logMonthYear, $viewRef, $contribRef,
$statViewsRef, $statSavesRef, $statUploadsRef, $cgiQuery );
}
}
}
close $TMPFILE; # Shouldn't be necessary with 'my'
unlink $tmpFilename; # FIXME: works on Windows??? Unlink before
# usage to ensure deleted on crash?
} else {
printMsg( " - Note: Log file $logFile does not exist", $cgiQuery );
}
if( $cgiQuery ) {
$tmp = $TWiki::statisticsTopicname;
my $url = &TWiki::getViewUrl( "", $tmp );
printMsg( "* Go back to <a href=\"$url\">$tmp</a> topic", $cgiQuery );
printMsg( "End creating usage statistics", $cgiQuery );
print "</body></html>\n";
} else {
printMsg( "End creating usage statistics", $cgiQuery );
}
}
# Debug only
# Print all entries in a view or contrib hash, sorted by web and item name
sub debugPrintHash {
my ($statsRef) = @_;
# print "Main.WebHome views = " . ${$statsRef}{'Main'}{'WebHome'}."\n";
# print "Main web, TWikiGuest contribs = " . ${$statsRef}{'Main'}{'Main.TWikiGuest'}."\n";
foreach my $web ( sort keys %$statsRef) {
my $count = 0;
print "$web web:\n";
# Get reference to the sub-hash for this web
my $webhashref = ${$statsRef}{$web};
# print "webhashref is " . ref ($webhashref) ."\n";
# Items can be topics (for view hash) or users (for contrib hash)
foreach my $item ( sort keys %$webhashref ) {
print " $item = ";
print "" . ( ${$webhashref}{$item} || 0 ) ."\n";
$count += ${$webhashref}{$item};
}
print " WEB TOTAL = $count\n";
}
}
# =========================
# Process the whole log file and collect information in hash tables.
# Must build stats for all webs, to handle the case of topics renamed
# into the web requested in a single-web statistics run.
#
# Main hash tables are divided by web:
#
# $view{$web}{$TopicName} == number of views, by topic
# $contrib{$web}{"Main.".$WikiName} == number of saves/uploads, by user
sub collectLogData
{
my( $TMPFILE, $theLogMonthYear ) = @_;
# Examples of log file format:
# | 03 Feb 2000 - 02:43 | Main.PeterThoeny | view | Know.WebHome | |
# | 03 Feb 2000 - 02:43 | Main.PeterThoeny | save | Know.WebHome | |
# | 03 Feb 2000 - 02:53 | Main.PeterThoeny | save | Know.WebHome | repRev 1.7 Main.PeterThoeny 2000/02/03 02:43:22 |
# | 23 Feb 2002 - 11:07 | Main.TWikiGuest | search | Main | Office *Locations[^A-Za-z] | 127.0.0.1 |
# Note: there is no topic name in a search log entry
# | 23 Feb 2002 - 11:07 | Main.guest | search | Main | Office *Locations[^A-Za-z] | 127.0.0.1 |
# | 28 Mar 2002 - 07:11 | Main.FredBloggs | rename | Test.TestTopic7 | moved to Test.TestTopic7New | 127.0.0.1 |
my %view; # Hash of hashes, counts topic views by <web, topic>
my %contrib; # Hash of hashes, counts uploads/saves by <web, user>
# Hashes for each type of statistic, one hash entry per web
my %statViews;
my %statSaves;
my %statUploads;
# Imported regex objects, supporting I18N
my $webNameRegex = $TWiki::webNameRegex;
my $wikiWordRegex = $TWiki::wikiWordRegex;
my $abbrevRegex = $TWiki::abbrevRegex;
# Script regexes
my $intranetUserRegex = qr/[a-z0-9]+/; # FIXME: should centralise this
my $userRegex = qr/(?:$intranetUserRegex|$wikiWordRegex)/;
my $opRegex = qr/[a-z0-9]+/; # Operation, no i18n needed
# my $topicRegex = qr/(?:$wikiWordRegex|$abbrevRegex)/; # Strict topic names only
my $topicRegex = qr/[^ ]+/; # Relaxed topic names - any non-space OK
# but won't be auto-linked in WebStatistics
my $errorRegex = qr/\(not exist\)/; # Match '(not exist)' flag
my ($webName, $opName, $topicName, $userName, $newTopicName, $newTopicWeb);
binmode $TMPFILE;
while ( <$TMPFILE> ) {
my $line = $_;
$line =~ s/\r*\n$//; # Clean out line endings
$line =~ /^\|[^\|]*\| ($webNameRegex\.$userRegex) \| ($opRegex) \| ($webNameRegex)[. ]/o;
$userName = $1 || ""; # Main.FredBloggs
$opName = $2 || "";
$webName = $3 || "";
# Skip bad logfile lines and warn if necessary
unless ($userName && $opName && $webName) {
if( $TWiki::doDebugStatistics ) {
TWiki::writeDebug("Invalid log file line = '$line'");
TWiki::writeDebug("userName = '$userName'");
TWiki::writeDebug("opName = '$opName'");
TWiki::writeDebug("webName = '$webName'");
}
next;
}
my $logContrib = 0;
if ($opName eq 'view' ) {
$statViews{$webName}++;
# Pick up the topic name and any error string
$line =~ /^\|[^\|]*\| ($webNameRegex\.$userRegex) \| ($opRegex) \| ($webNameRegex)\.($topicRegex) \| +(${errorRegex}?) */o;
$topicName = $4 || "";
my $noSuchTopic = $5 || ""; # Set if '(not exist)' matched
unless( $topicName ) {
if( $TWiki::doDebugStatistics ) {
TWiki::writeDebug("Invalid log file line = '$line'");
TWiki::writeDebug("userName = '$userName'");
TWiki::writeDebug("opName = '$opName'");
TWiki::writeDebug("webName = '$webName'");
TWiki::writeDebug("topicName = '$topicName'");
}
next;
}
# Skip accesses to non-existent topics
if ($noSuchTopic) {
next;
} else {
# Count this topic access
$view{$webName}{$topicName}++;
}
} elsif ($opName eq 'save' ) {
$statSaves{$webName}++;
$logContrib = 1;
} elsif ($opName eq 'upload' ) {
$statUploads{$webName}++;
$logContrib = 1;
} elsif ($opName eq 'rename' ) {
# Pick up the old and new topic names
$line =~ /^\|[^\|]*\| ($webNameRegex\.$userRegex) \| ($opRegex) \| ($webNameRegex)\.($topicRegex) \| moved to ($webNameRegex)\.($topicRegex) /o;
$topicName = $4 || "";
$newTopicWeb = $5 || "";
$newTopicName = $6 || "";
## TWiki::writeDebug("$topicName renamed to $newTopicWeb.$newTopicName");
unless ($topicName && $newTopicWeb && $newTopicName) {
if( $TWiki::doDebugStatistics ) {
TWiki::writeDebug("Invalid log file line (rename) = '$line'");
TWiki::writeDebug("userName = '$userName'");
TWiki::writeDebug("opName = '$opName'");
TWiki::writeDebug("webName = '$webName'");
TWiki::writeDebug("topicName= '$topicName'");
TWiki::writeDebug("newTopicWeb= '$newTopicWeb'");
TWiki::writeDebug("newTopicName = '$newTopicName'");
}
next;
}
# Get number of views for old topic this month (may be zero)
my $oldViews = $view{$webName}{$topicName} || 0;
# Transfer views from old to new topic
$view{$newTopicWeb}{$newTopicName} = $oldViews;
delete $view{$webName}{$topicName};
# Transfer views from old to new web
if ( $newTopicWeb ne $webName ) {
$statViews{$webName} -= $oldViews;
$statViews{$newTopicWeb} += $oldViews;
}
}
# Record saves and uploads
if ($logContrib) {
# Record the contribution by user name
$contrib{$webName}{$userName}++;
}
=pod
# DEBUG
$. <= 5 && print "$line\n";
print "$line\n";
print "$.: $userName did $opName on $webName";
print ".$topicName" if (defined $topicName);
print "\n";
=cut
}
=pod
print "Main.WebHome views = " . $view{'Main'}{'WebHome'}."\n";
print "Main web's contribs = " . $contrib{'Main'}{'Main.RichardDonkin'}."\n";
debugPrintHash(\%view);
debugPrintHash(\%contrib);
=cut
return \%view, \%contrib, \%statViews, \%statSaves, \%statUploads;
}
# =========================
sub processWeb
{
my( $thePathInfo, $theRemoteUser, $theTopic, $theLogMonthYear, $viewRef, $contribRef,
$statViewsRef, $statSavesRef, $statUploadsRef, $cgiQuery ) = @_;
my ( $topic, $webName, $dummy, $userName, $dataDir ) =
&TWiki::initialize( $thePathInfo, $theRemoteUser, $theTopic, "", $cgiQuery );
$dummy = ""; # to suppress warning
printMsg( "* Reporting on TWiki.$webName web", $cgiQuery );
if( ! &TWiki::Store::webExists( $webName ) ) {
printMsg( " *** Error: Web $webName does not exist", $cgiQuery );
return;
}
# Handle null values, print summary message to browser/stdout
my $statViews = $statViewsRef->{$webName};
my $statSaves = $statSavesRef->{$webName};
my $statUploads = $statUploadsRef->{$webName};
$statViews ||= 0;
$statSaves ||= 0;
$statUploads ||= 0;
printMsg( " - view: $statViews, save: $statSaves, upload: $statUploads", $cgiQuery );
# Get the top N views and contribs in this web
my (@topViews) = getTopList( $TWiki::statsTopViews, $webName, $viewRef );
my (@topContribs) = getTopList( $TWiki::statsTopContrib, $webName, $contribRef );
# Print information to stdout
my $statTopViews = "";
my $statTopContributors = "";
if( @topViews ) {
printMsg( " - top view: $topViews[0]", $cgiQuery );
$statTopViews = join( "<br /> ", @topViews );
}
if( @topContribs ) {
printMsg( " - top contributor: $topContribs[0]", $cgiQuery );
$statTopContributors = join( "<br /> ", @topContribs );
}
# Update the WebStatistics topic
my $tmp;
my $statsTopic = $TWiki::statisticsTopicname;
# DEBUG
# $statsTopic = "TestStatistics"; # Create this by hand
if( &TWiki::Store::topicExists( $webName, $statsTopic ) ) {
my( $meta, $text ) = &TWiki::Store::readTopic( $webName, $statsTopic, 1 );
my @lines = split( /\n/, $text );
my $statLine;
my $idxStat = -1;
my $idxTmpl = -1;
for( my $x = 0; $x < @lines; $x++ ) {
$tmp = $lines[$x];
# Check for existing line for this month+year
if( $tmp =~ /$theLogMonthYear/ ) {
$idxStat = $x;
} elsif( $tmp =~ /<\!\-\-statDate\-\->/ ) {
$statLine = $tmp;
$idxTmpl = $x;
}
}
if( ! $statLine ) {
$statLine = "| <!--statDate--> | <!--statViews--> | <!--statSaves--> | <!--statUploads--> | <!--statTopViews--> | <!--statTopContributors--> |";
}
$statLine =~ s/<\!\-\-statDate\-\->/$theLogMonthYear/;
$statLine =~ s/<\!\-\-statViews\-\->/ $statViews/;
$statLine =~ s/<\!\-\-statSaves\-\->/ $statSaves/;
$statLine =~ s/<\!\-\-statUploads\-\->/ $statUploads/;
$statLine =~ s/<\!\-\-statTopViews\-\->/$statTopViews/;
$statLine =~ s/<\!\-\-statTopContributors\-\->/$statTopContributors/;
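# After these substitutions the line is a complete WebStatistics table
# row, e.g. (values are illustrative only):
#   | Feb 2003 | 165 | 12 | 3 | 25 WebHome<br />... | 14 Main.PeterThoeny<br />... |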
if( $idxStat >= 0 ) {
# entry already exists, need to update
$lines[$idxStat] = $statLine;
} elsif( $idxTmpl >= 0 ) {
# entry does not exist, add after <!--statDate--> line
$lines[$idxTmpl] = "$lines[$idxTmpl]\n$statLine";
} else {
# entry does not exist, add at the end
$lines[@lines] = $statLine;
}
$text = join( "\n", @lines );
$text .= "\n";
&TWiki::Store::saveTopic( $webName, $statsTopic, $text, $meta, "", 1, 1, 1 );
printMsg( " - Topic $statsTopic updated", $cgiQuery );
} else {
printMsg( " *** Warning: No updates done, topic $webName.$statsTopic does not exist", $cgiQuery );
}
}
# =========================
# Get the items with top N frequency counts
# Items can be topics (for view hash) or users (for contrib hash)
sub getTopList
{
my( $theMaxNum, $webName, $statsRef ) = @_;
# Get reference to the sub-hash for this web
my $webhashref = $statsRef->{$webName};
# print "Main.WebHome views = " . $statsRef->{$webName}{'WebHome'}."\n";
# print "Main web, TWikiGuest contribs = " . ${$statsRef}{$webName}{'Main.TWikiGuest'}."\n";
my @list = ();
my $topicName;
my $statValue;
# Convert sub hash of item=>statsvalue pairs into an array, @list,
# of '$statValue $topicName', ready for sorting.
while( ( $topicName, $statValue ) = each( %$webhashref ) ) {
# Right-align statistic value for sorting
$statValue = sprintf "%7d", $statValue;
# Add new array item at end of array
$list[@list] = "$statValue $topicName";
}
# DEBUG
# print " top N list for $webName\n";
# print join "\n", @list;
# Sort @list by frequency and pick the top N entries
if( @list ) {
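# The three maps below run bottom-up: strip the sprintf padding used
# for sorting, then prefix one- and two-digit counts with &nbsp; so
# the numbers stay right-aligned when rendered in HTML.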
@list = map{ s/^([0-9][0-9][^0-9])/\&nbsp\;$1/; $_ }
map{ s/^([0-9][^0-9])/\&nbsp\;\&nbsp\;$1/; $_ }
map{ s/^\s*//; $_ }
reverse( sort( @list ) );
if( $theMaxNum >= @list ) {
$theMaxNum = @list - 1;
}
return @list[0..$theMaxNum];
}
return @list;
}
# =========================
sub printMsg
{
my( $msg, $cgiQuery ) = @_;
my $htmlMsg = $msg;
if( $cgiQuery ) {
# TODO: May need to fix this regex if internationalised script
# messages are supported in future.
if( $htmlMsg =~ /^[A-Z]/ ) {
$htmlMsg =~ s/^([A-Z].*)/<h3>$1<\/h3>/go;
} else {
$htmlMsg =~ s/(\*\*\*.*)/<font color=\"FF0000\">$1<\/font>/go;
$htmlMsg =~ s/^\s\s/&nbsp;&nbsp;/go;
$htmlMsg =~ s/^\s/&nbsp;/go;
$htmlMsg .= "<br />";
}
$htmlMsg =~ s/==([A-Z]*)==/<font color=\"FF0000\">==$1==<\/font>/go;
print "$htmlMsg\n";
} else {
print "$msg\n";
}
}
# =========================
# EOF

tl
@@ -1,5 +0,0 @@
#!/bin/sh
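# Usage sketch (assumes Taskwarrior is installed):
#   tl 10 project:foo
# lists at most 10 tasks matching the remaining filter arguments.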
limit=$1; shift
task list limit:$limit "$@"

tmutt
@@ -1,2 +0,0 @@
#!/bin/sh
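# Open mutt in a new terminal window (X resource name "mutt"),
# passing any arguments through to mutt.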
x-terminal-emulator -name mutt -e mutt "$@"

@@ -1,20 +0,0 @@
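# Inferred from the sed patterns below: comments out two fstab
# entries, switches the root filesystem type from ext3 to reiserfs,
# and flips the X config from the "ati" driver with a ps/2 mouse to
# "nv" with the MouseMan protocol.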
# fstab
cat /etc/fstab | sed 's,^/dev/discs/disc0/part3,#/dev/discs/disc0/part3,' > \
/tmp/fstab
cat /tmp/fstab | sed 's,^/dev/discs/disc0/part1,#/dev/discs/disc0/part1,' > \
/etc/fstab
cat /etc/fstab | sed 's,^\(/dev/root.*\)ext3,\1reiserfs,' > \
/tmp/fstab
mv /tmp/fstab /etc/fstab
# X
cat /etc/X11/XF86Config | sed 's/\(Driver.*\)"ati"/\1"nv"/' > /tmp/xf86conf
cat /tmp/xf86conf | sed 's,\(Option.*"Protocol".*\)"ps/2",\1"MouseMan",' > \
/etc/X11/XF86Config
cat /etc/X11/XF86Config | sed 's,\(Option.*"Device".*\)"/dev/misc/psaux",\1"/dev/tts/0",' > /tmp/xf86conf
mv /tmp/xf86conf /etc/X11/XF86Config

@@ -1,20 +0,0 @@
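# The inverse of the previous script: uncomments the two fstab
# entries and switches the settings back (reiserfs to ext3, "nv" to
# "ati", MouseMan to ps/2).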
# fstab
cat /etc/fstab | sed 's,^#/dev/discs/disc0/part3,/dev/discs/disc0/part3,' > \
/tmp/fstab
cat /tmp/fstab | sed 's,^#/dev/discs/disc0/part1,/dev/discs/disc0/part1,' > \
/etc/fstab
cat /etc/fstab | sed 's,^\(/dev/root.*\)reiserfs,\1ext3,' > \
/tmp/fstab
mv /tmp/fstab /etc/fstab
# X
cat /etc/X11/XF86Config | sed 's/\(Driver.*\)"nv"/\1"ati"/' > /tmp/xf86conf
cat /tmp/xf86conf | sed 's,\(Option.*"Protocol".*\)"MouseMan",\1"ps/2",' > \
/etc/X11/XF86Config
cat /etc/X11/XF86Config | sed 's,\(Option.*"Device".*\)"/dev/tts/0",\1"/dev/misc/psaux",' > /tmp/xf86conf
mv /tmp/xf86conf /etc/X11/XF86Config

@@ -1,6 +0,0 @@
#!/bin/sh
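# For every detached screen session, attach to it in UTF-8 mode (-U)
# via multi-attach (-x); the echo prints each command before running it.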
for screen in $(screen -ls | awk '/Detached/ { print $1 }'); do
    echo screen -Ux $screen;
    screen -Ux $screen;
done

@@ -1,8 +0,0 @@
#!/bin/sh
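# Presumably the downloader.sh invoked by the start script further
# below: downloads one torrent with the upload rate capped at 1 and
# tees the client output into the given log file.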
torrent="$1"
logfile="$2"
echo "Logging to $logfile"
#btdownloadheadless --max_upload_rate 1 "$torrent" > "$logfile" 2>&1
btdownloadheadless --display_interval 10 --max_upload_rate 1 "$torrent" 2>&1 | tee "$logfile"

@@ -1,27 +0,0 @@
#!/bin/sh
# Nico Schottelius
# find successfully downloaded torrents
TOPABS=$(cd $(dirname $0); pwd -P)
TORDIR="$TOPABS/torrents"
LOGDIR="$TOPABS/logs"
OUTDIR="$TOPABS/out"
SAVEDIR="$TOPABS/fertig"
SAVETOR="$SAVEDIR/torrents"
SEARCHTERM='time left: Download Succeeded!'
TESTLINES="20"
# process the download logs in the log directory
cd "$LOGDIR"
#for torrent in ${TORDIR}/*; do
for log in *; do
    if tail -n${TESTLINES} "$log" | grep -q "$SEARCHTERM"; then
        echo "$log" finished.
        file=$(tail -n${TESTLINES} "$log" | awk -F: '/^download to:/ { print $2 }' | tail -n1 | sed 's;^ */;/;')
        mv "${file}" "${SAVEDIR}"
        mv "${TORDIR}/${log}" "${SAVETOR}"
        rm "$log"
    fi
done

@@ -1 +0,0 @@
time left: Download Succeeded!

@@ -1,5 +0,0 @@
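# Print the most recent "percent done:" line from each download log.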
for file in logs/*; do
    echo -n "${file}: ";
    tail -n20 "$file" | grep "percent done:" | tail -n1;
done

@@ -1,22 +0,0 @@
#!/bin/sh
#set -x
TOPABS=$(cd $(dirname $0); pwd -P)
TORDIR="$TOPABS/torrents"
LOGDIR="$TOPABS/logs"
OUTDIR="$TOPABS/out"
DNLOADR="$TOPABS/downloader.sh"
rm -f "${LOGDIR}/"*
# place downloads into current directory/out
cd "$OUTDIR"
for torrent in ${TORDIR}/*; do
    logfile="$LOGDIR/$(basename "$torrent")"
    #echo screen -d -m btdownloadcurses --max_upload_rate 1 "$torrent" G "$logfile"
    #screen -d -m btdownloadheadless --max_upload_rate 1 "$torrent" > "$logfile"
    screen -d -m "$DNLOADR" "$torrent" "$logfile"
done

@@ -1,8 +0,0 @@
#!/bin/sh
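# Send TERM to all running btdownloadheadless processes, then KILL
# any that are still alive ten seconds later.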
#PIDS=$(ps axuwww | awk '/\/usr\/bin\/btdownloadcurses/ { print $2 }')
PIDS=$(ps axuwww | awk '/\/usr\/bin\/btdownloadheadless/ { print $2 }')
kill -TERM $PIDS
sleep 10
kill -KILL $PIDS 2>/dev/null

@@ -1,2 +0,0 @@
#!/bin/sh
scp *torrent zuhause.schottelius.org:torrent/t/ && rm *torrent

@@ -1,86 +0,0 @@
#!/bin/sh
# Nico Schottelius
# Date: Mit Nov 9 12:57:04 CET 2005
# Last Modified: Thu Nov 10 12:53:00 CET 2005
#
# Install latest git and cogito to $INSTALL_PREFIX/$name and
# link the binaries to $BINDIR
# Needs git and cogito to work!
# And:
# - libexpat-dev
# - libcurl3-dev
# - libssl-dev
# - libz-dev
#
# Configuration
#
BUILDDIR=${BUILDDIR:-/home/user/nico/build}
INSTALL_PREFIX=${INSTALL_PREFIX:-/usr/packages}
BINDIR=${BINDIR:-/usr/local/bin}
MAKE=${MAKE:-make}
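#
# A hypothetical invocation overriding the defaults (the script's own
# file name is not recorded in this diff):
#   BUILDDIR=/tmp/build BINDIR=$HOME/bin sh ./install-git-cogito.sh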
#
# No configuration needed below here
#
BASE_GET=http://www.kernel.org/pub/scm/
PROJECTS="git cogito"
# urls for initial cloning
git_url=http://www.kernel.org/pub/software/scm/git/
cogito_url=http://www.kernel.org/pub/software/scm/cogito/
error_in()
{
    echo "[failed] $1"
    echo "Exiting now."
    exit 1
}
for project in $PROJECTS; do
    echo "Working on $project (in $BUILDDIR/$project) ... "
    # Test for existence: if git or cogito is missing, install it from
    # the current archive
    which $project > /dev/null
    if [ $? -ne 0 ]; then
        echo "$project not found, installing from archive"
        eval base_url=\$${project}_url
        get_url=$(lynx -dump "$base_url" | awk "/$project-([0-9]\.)*tar.bz2\$/ { print \$2 }" | sort -n | tail -n 1)
        # (the archive URL is only reported here; nothing downloads it)
        echo "Retrieving $project from $get_url"
    fi
    if [ ! -d "$BUILDDIR/$project" ]; then
        echo "Cloning $project"
        cg-clone "${BASE_GET}${project}/${project}.git" "$BUILDDIR/$project"
    else
        echo "Updating $project from \"origin\""
        cd "$BUILDDIR/$project" || error_in "$BUILDDIR/$project"
        cg-update origin
    fi
    if [ $? -ne 0 ]; then
        echo "Pull or clone failed, ABORTING."
        exit 23
    fi
    cd "$BUILDDIR/$project"
    version=$(cg-object-id)
    echo "Installing $project (Version: $version)"
    DDIR=$INSTALL_PREFIX/$project-$version
    $MAKE clean || error_in "Cleaning $project"
    $MAKE "prefix=$DDIR" all || error_in "Building $project"
    $MAKE "prefix=$DDIR" install || error_in "Installing $project"
    echo "Linking files to $BINDIR ..."
    for file in "$DDIR/bin/"*; do
        ln -sf "$file" "$BINDIR" || error_in "Linking $file"
    done
done

@@ -1 +0,0 @@
/home/users/nico/localch/vcs/sysadmin-logs/update-puppet-on-host

@@ -1,5 +0,0 @@
#!/bin/sh
# Open vim
# x-terminal-emulator -e vim "$@"
urxvt -e vim "$@"

wlan
@@ -1,50 +0,0 @@
#!/bin/sh -e
# Nico Schottelius, 20080910 11:09
# The initial version
#
# udhcpc -nfqi wlp0s3 && vpnc ~nico/ethz/vpn/pc.conf
#
# Failed for various reasons. See the working hack/version below.
#
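# If no interface is given, fall back to a hard-coded device; only
# the last dev= assignment below takes effect, the earlier ones are
# apparently leftovers from other machines.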
if [ "$#" -ge 1 ]; then
dev=$1; shift
else
dev=wlp3s0
dev=wlan0
dev=wlp2s0
dev=wlp1s0
dev=wlp4s0
dev=wlp3s0
fi
set -x
set +e
wpa_cli terminate || true
#rmmod b43 bcma
sleep 1
#modprobe b43
set -e
#ip l s eth0 down
# Ensure previous wpa_supplicant is not running
( wpa_cli terminate || exit 0 )
# Ensure the adapter works, reloading the driver helps
# ( rmmod iwlagn; modprobe iwlagn; sleep 2 )
# wpa_supplicant -B -Dwext -iwlp0s3 -c /etc/wpa_supplicant/wpa_supplicant.conf
# wpa_supplicant -B -Dwext -i$dev -c /etc/wpa_supplicant/wpa_supplicant.conf
#wpa_supplicant -B -Dnl80211 -i$dev -c /etc/wpa_supplicant/wpa_supplicant.conf
wpa_supplicant -B -Dnl80211 -i$dev -c /etc/wpa_supplicant/wpa_supplicant-${dev}.conf
# Give wpa some time to connect
sleep 5
# Choose a DHCP client
( udhcpc -nqfi $dev || dhcpcd )
wpa_cli
# vpnc ~nico/ethz/vpn/sg.conf
#vpnc ~nico/firmen/ethz/vpn/vpnc.conf
# vpnc-disconnect

@@ -1,5 +0,0 @@
#!/bin/sh
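# Place the external output (HDMI1 or DP1, whichever is connected)
# left of the internal eDP1 panel.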
x-off
xrandr --output HDMI1 --auto --rotate normal --left-of eDP1
xrandr --output DP1 --auto --rotate normal --left-of eDP1