X-Git-Url: http://git.indexdata.com/?a=blobdiff_plain;f=robot.tcl;h=73d558aad3893ad0b0b823f929247a5cb5d02dac;hb=4355628830cd0f9e27c059d20254d8e1c30896eb;hp=93c45416276eb1ee0fa5886e89d942e3ba9afcbb;hpb=d8234df96ab8fb03ed71f6358f7211ebe725b495;p=tclrobot.git

diff --git a/robot.tcl b/robot.tcl
index 93c4541..73d558a 100755
--- a/robot.tcl
+++ b/robot.tcl
@@ -1,24 +1,28 @@
 #!/usr/bin/tclsh
-# $Id: robot.tcl,v 1.6 2000/12/07 20:16:11 adam Exp $
+# $Id: robot.tcl,v 1.45 2003/06/11 10:29:41 adam Exp $
 #
-proc RobotFileNext {area} {
+proc RobotFileNext1 {area lead} {
+    # puts "RobotFileNext1 area=$area lead=$lead"
     if {[catch {set ns [glob ${area}/*]}]} {
         return {}
     }
-    set off [string first / $area]
-    incr off
-
     foreach n $ns {
         if {[file isfile $n]} {
-            if {[string first :.html $n] > 0} {
-                return http://[string range $area/ $off end]
-            }
-            return http://[string range $n $off end]
+            set off [string last / $n]
+            # skip /
+            incr off
+            set end [string length $n]
+            # skip _.tkl
+            incr end -6
+            return $lead/[string range $n $off $end]
         }
     }
     foreach n $ns {
         if {[file isdirectory $n]} {
-            set sb [RobotFileNext $n]
+            set off [string last / $n]
+            # skip /
+            incr off
+            set sb [RobotFileNext1 $n $lead/[string range $n $off end]]
             if {[string length $sb]} {
                 return $sb
             }
@@ -27,34 +31,101 @@ proc RobotFileNext {area} {
     return {}
 }
-proc RobotFileExist {area host path} {
-    set comp [split $area/$host$path /]
-    set l [llength $comp]
-    incr l -1
-    if {![string length [lindex $comp $l]]} {
-        set comp [split $area/$host$path:.html /]
-    } elseif {[file exists [join $comp /]]} {
-        return 1
-    } else {
-        set comp [split $area/$host$path/:.html /]
+proc RobotWriteRecord {outf fromurl distance} {
+    puts $outf {}
+    puts $outf ""
+    puts $outf ""
+    puts $outf $distance
+    puts $outf ""
+    puts $outf ""
+    puts $outf $fromurl
+    puts $outf ""
+    puts $outf ""
+}
+
+proc RobotReadRecord {inf fromurlx distancex} {
+    upvar $fromurlx fromurl
+    upvar $distancex distance
+    gets $inf
+    gets $inf
+    gets $inf
+    set distance [string trim [gets $inf]]
+    # puts "got distance = $distance"
+    gets $inf
+    gets $inf
+    set fromurl [string trim [gets $inf]]
+}
+
+proc RobotFileNext {task area} {
+    global control
+    global idletime ns
+    global status
+
+    # puts "RobotFileNext seq=$control($task,seq)"
+    if {$control($task,seq) < 0} {
+        return {}
     }
-    return [file exists [join $comp /]]
+    if {$control($task,seq) == 0} {
+        if {[catch {set ns($task) [glob $task/$area/*]}]} {
+            return done
+        }
+    }
+    # puts "ns=$ns($task)"
+    set off [string length $task/$area]
+    incr off
+    set n [lindex $ns($task) $control($task,seq)]
+    # puts "n=$n"
+    if {![string length $n]} {
+        set control($task,seq) -1
+        flush stdout
+        set statusfile [open $task/status w]
+        puts $statusfile "$status($task,unvisited) $status($task,bad) $status($task,visited)"
+        close $statusfile
+        return wait
+    }
+    incr control($task,seq)
+    if {[file isfile $n/robots.txt_.tkl]} {
+        # puts "ok returning http://[string range $n $off end]/robots.txt"
+        return http://[string range $n $off end]/robots.txt
+    } elseif {[file isdirectory $n]} {
+        set sb [RobotFileNext1 $n http://[string range $n $off end]]
+        if {[string length $sb]} {
+            return $sb
+        }
+    }
+    puts "no more work at end of RobotFileNext n=$n"
+    puts "ns=$ns($task)"
+    return {}
 }
-proc RobotFileUnlink {area host path} {
-    set comp [split $area/$host$path /]
-    set l [llength $comp]
-    incr l -1
-    if {![string length [lindex $comp $l]]} {
-        set comp [split $area/$host$path:.html /]
+
+proc RobotFileExist {task area host path} {
+    global debuglevel
+
+    if
{$debuglevel > 3} { + puts "RobotFileExist begin area=$area host=$host path=$path" } - if {[catch {exec rm [join $comp /]}]} return - incr l -1 + return [file exists $task/$area/$host${path}_.tkl] +} + +proc RobotFileUnlink {task area host path} { + global status + # puts "RobotFileUnlink begin" + # puts "area=$area host=$host path=$path" + set npath $task/$area/$host${path}_.tkl + # puts "npath=$npath" + set comp [split $npath /] + if {[catch {exec rm $npath}]} return + + set l [llength $comp] + incr l -2 + incr status($task,$area) -1 for {set i $l} {$i > 0} {incr i -1} { set path [join [lrange $comp 0 $i] /] if {![catch {glob $path/*}]} return - exec rmdir ./$path + exec rmdir $path } + # puts "RobotFileUnlink end" } proc RobotFileClose {out} { @@ -63,97 +134,330 @@ proc RobotFileClose {out} { } } -proc RobotFileOpen {area host path {mode w}} { +proc RobotFileOpen {task area host path {mode w}} { set orgPwd [pwd] global workdir + global status + global debuglevel + + # puts "RobotFileOpen task=$task path=$path" if {![info exists workdir]} { return stdout } - puts "RobotFileOpen orgPwd=$orgPwd area=$area host=$host path=$path" + if {$debuglevel > 3} { + puts "RobotFileOpen orgPwd=$orgPwd area=$area host=$host path=$path mode=$mode" + } if {[string compare $orgPwd $workdir]} { + puts "ooops. RobotFileOpen failed" puts "workdir = $workdir" puts "pwd = $orgPwd" exit 1 } - set comp [split $area/$host$path /] + + set comp [split $task/$area/$host /] set len [llength $comp] incr len -1 - for {set i 0} {$i < $len} {incr i} { + + # puts "1 comp=$comp" + + for {set i 0} {$i <= $len} {incr i} { set d [lindex $comp $i] - if {[catch {cd ./$d}]} { + if {[string length $d] == 0} { + cd / + } elseif {[catch {cd $d}]} { exec mkdir $d cd ./$d + if {![string compare $area unvisited] && $i == $len && $mode == "w"} { + if {[string compare $path /robots.txt]} { + set out [open robots.txt_.tkl w] + puts "creating robots.txt in $d" + close $out + incr status($task,unvisited) + } + } + } + } + + set comp [split $path /] + set len [llength $comp] + incr len -1 + + # puts "2 path=$path comp=$comp" + + for {set i 0} {$i < $len} {incr i} { + set d [lindex $comp $i] + if {[string length $d] > 0} { + if {[catch {cd $d}]} { + exec mkdir $d + cd ./$d + } } } set d [lindex $comp $len] - if {[string length $d]} { - if {[file isdirectory $d]} { - set out [open $d/:.html $mode] - } else { - set out [open $d $mode] - } - } else { - set out [open :.html $mode] + set out [open ${d}_.tkl $mode] + if {$mode == "w"} { + incr status($task,$area) } cd $orgPwd - #puts "RobotFileStop" return $out } -proc RobotRestart {} { +proc RobotStartJob {fname t} { + global control + + set f [open $fname r] + set xml [read $f] + puts "Reading $fname" + close $f + if {![regexp {([^<]*)} $xml x status]} { + return + } + if {$status == "done"} { + puts "already done" + return + } + puts "status = $status" + if {![task $t]} { + return + } + htmlSwitch $xml \ + url { + url $body + } filter { + set type $parm(type) + set action $parm(action) + if {$type == "domain"} { + $action url http://$body/* + } + if {$type == "url"} { + $action url $body + } + if {$type == "mime"} { + $action mime $body + } + } distance { + set control($t,distance) $body + } status { + set control($t,filestatus) $body + } + if {$status == "pending"} { + regsub {[^<]*} $xml {running} xml2 + set f [open $fname w] + puts -nonewline $f $xml2 + close $f + } +} + +proc RobotDoneJob {t} { + global daemon_dir + + if {![info exists daemon_dir]} { + return + } + + set fname $t.tkl + + set 
f [open $fname r] + set xml [read $f] + puts "Reading $fname" + regexp {([^<]*)} $xml x status + puts "------" + puts "status = $status" + close $f + + regsub {[^<]*} $xml {done} xml2 + set f [open $fname w] + puts -nonewline $f $xml2 + close $f +} + +proc RobotScanDir {} { + global daemon_dir + + if {![info exists daemon_dir]} { + return + } + foreach d $daemon_dir { + if {[catch {set files [glob $d/*.tkl]}]} { + return + } + foreach fname $files { + if {[file isfile $fname] && [file readable $fname]} { + set t [file rootname $fname] + RobotStartJob $fname $t + } + } + } +} + +proc RobotRR {task} { + global control robotsRunning tasks robotsMax status + + puts "RobotRR -- running=$robotsRunning max=$robotsMax---------------" + incr robotsRunning -1 + + # only one task gets through... + if {[string compare [lindex $tasks 0] $task]} { + return + } + puts "RobotRR. task = $task" + while {$robotsRunning} { + vwait robotsRunning + } + puts "Scan" + if {[catch {RobotScanDir} msg]} { + puts "RobotScanDir failed" + puts $msg + } + foreach t $tasks { + set statusfile [open $t/status w] + puts $statusfile "$status($t,unvisited) $status($t,bad) $status($t,visited)" + close $statusfile + set control($t,seq) 0 + RobotStart $t + } +} + +proc RobotDaemonSig {} { + global daemon_cnt + + incr daemon_cnt +} + +proc RobotDaemonLoop {} { + global daemon_cnt tasks robotsRunning status + + set daemon_cnt 0 + while 1 { + puts $daemon_cnt + + RobotScanDir + + if {[info exists tasks]} { + puts "daemon loop tasks $tasks" + foreach t $tasks { + set control($t,seq) 0 + RobotStart $t + } + while {$robotsRunning} { + vwait robotsRunning + } + } + after 30000 RobotDaemonSig + vwait daemon_cnt + } +} + +proc RobotRestart {task url sock} { + global URL robotsRunning + + close $sock + after cancel $URL($sock,cancel) + + foreach v [array names URL $task,$url,*] { + unset URL($v) + } + + incr robotsRunning -1 + RobotStart $task +} + +proc RobotStart {task} { global URL - global robotMoreWork - - while {1} { - set url [RobotFileNext unvisited] + global robotsRunning robotsMax idletime status tasks + + # puts "RobotStart $task running=$robotsRunning" + while {1} { + set url [RobotFileNext $task unvisited] + if {[string compare $url done] == 0} { + puts "In RobotStart task $task done" + + catch {unset ntasks} + foreach t $tasks { + if {[string compare $t $task]} { + lappend ntasks $t + } else { + puts "task $t done" + } + } + if {![info exists ntasks]} { + unset tasks + puts "all done" + } else { + set tasks $ntasks + } + RobotDoneJob $task + return + } if {![string length $url]} { - break + return + } + incr robotsRunning + if {[string compare $url wait] == 0} { + after $idletime [list RobotRR $task] + return } - set r [RobotGetUrl $url {}] + set r [RobotGetUrl $task $url {}] if {!$r} { - puts "RobotGetUrl returned 0 on url=$url" - return + if {$robotsRunning >= $robotsMax} return } else { - RobotFileUnlink unvisited $URL($url,host) $URL($url,path) - } + incr robotsRunning -1 + if {![RobotFileExist $task bad $URL($task,$url,hostport) $URL($task,$url,path)]} { + set outf [RobotFileOpen $task bad $URL($task,$url,hostport) $URL($task,$url,path)] + RobotFileClose $outf + } + RobotFileUnlink $task unvisited $URL($task,$url,hostport) $URL($task,$url,path) + } } - set robotMoreWork 0 } -proc headSave {url out} { +proc headSave {task url out} { global URL - puts $out {} - if {[info exists URL($url,head,last-modified)]} { - puts $out "$URL($url,head,last-modified)" + if {[info exists URL($task,$url,head,last-modified)]} { + puts $out 
"$URL($task,$url,head,last-modified)" } puts $out {} - if {[info exists URL($url,head,date)]} { - puts $out " $URL($url,head,date)" + if {[info exists URL($task,$url,head,date)]} { + puts $out " $URL($task,$url,head,date)" } - if {[info exists URL($url,head,content-length)]} { - puts $out " $URL($url,head,content-length)" + if {[info exists URL($task,$url,head,content-length)]} { + puts $out " $URL($task,$url,head,content-length)" } - if {[info exists URL($url,head,server)]} { - puts $out " $URL($url,head,server)" + if {[info exists URL($task,$url,head,server)]} { + puts $out " $URL($task,$url,head,server)" } puts $out {} puts $out {} puts $out " $url" - if {[info exists URL($url,head,content-type)]} { - puts $out " $URL($url,head,content-type)" + if {[info exists URL($task,$url,head,content-type)]} { + puts $out " $URL($task,$url,head,content-type)" } puts $out {} } -proc RobotHref {url hrefx hostx pathx} { - global URL domains +proc RobotHref {task url hrefx hostx pathx} { + global URL control debuglevel upvar $hrefx href upvar $hostx host upvar $pathx path - puts "Ref url = $url href=$href" + if {$debuglevel > 1} { + puts "Ref input url = $url href=$href" + } + + if {[string first { } $href] >= 0} { + return 0 + } + if {[string length $href] > 256} { + return 0 + } + +# Skip pages that have ? in them +# if {[string first {?} $url] >= 0 && [string first {?} $href] >= 0} { +# return 0 +# } # get method (if any) if {![regexp {^([^/:]+):(.*)} $href x method hpath]} { set hpath $href @@ -168,26 +472,32 @@ proc RobotHref {url hrefx hostx pathx} { if {![string length $surl]} { set surl / } - set ok 0 - foreach domain $domains { - if {[string match $domain $host]} { - set ok 1 - break + if {[info exist control($task,domains)]} { + set ok 0 + foreach domain $control($task,domains) { + if {[string match $domain $host]} { + set ok 1 + break + } } - } - if {!$ok} { - return 0 - } + if {!$ok} { + return 0 + } + } } else { regexp {^([^\#]*)} $hpath x surl - set host $URL($url,host) + set host $URL($task,$url,hostport) } if {![string length $surl]} { return 0 } if {[string first / $surl]} { # relative path - regexp {^([^\#?]*)} $URL($url,path) x dpart + set curpath $URL($task,$url,path) + if {[info exists URL($task,$url,bpath)]} { + set curpath $URL($task,$url,bpath) + } + regexp {^([^\#?]*)} $curpath x dpart set l [string last / $dpart] if {[expr $l >= 0]} { set surl [string range $dpart 0 $l]$surl @@ -195,330 +505,527 @@ proc RobotHref {url hrefx hostx pathx} { set surl $dpart/$surl } } - set c [split $surl /] - set i [llength $c] - incr i -1 - set path [lindex $c $i] - incr i -1 - while {$i >= 0} { - switch -- [lindex $c $i] { + set surllist [split $surl /] + catch {unset path} + set pathl 0 + foreach c $surllist { + switch -- $c { .. { - incr i -2 - } - . { - incr i -1 + if {$pathl > 1} { + incr pathl -2 + set path [lrange $path 0 $pathl] + incr pathl + } } - default { - set path [lindex $c $i]/$path - incr i -1 + . 
{ + + } + default { + incr pathl + lappend path $c } } - } + } + if {$debuglevel > 4} { + puts "pathl=$pathl output path=$path" + } + set path [join $path /] + if {![string length $path]} { + set path / + } + regsub -all {~} $path {%7E} path set href "$method://$host$path" - puts "Ref href = $href" - return 1 -} -proc Robot401 {url} { - global URL - - puts "Bad URL $url" - set fromurl {} - catch { - set inf [RobotFileOpen unvisited $URL($url,host) $URL($url,path) r] - set fromurl [gets $inf] - close $inf - } - RobotFileUnlink unvisited $URL($url,host) $URL($url,path) - if {![RobotFileExist bad $URL($url,host) $URL($url,path)]} { - set outf [RobotFileOpen bad $URL($url,host) $URL($url,path)] - puts $outf "URL=$url 401" - puts $outf "Reference $fromurl" - RobotFileClose $outf + if {$debuglevel > 1} { + puts "Ref result = $href" } + return [checkrule $task url $href] } -proc Robot404 {url} { +proc RobotError {task url code} { global URL - puts "Bad URL $url" + puts "Bad URL $url (code $code)" set fromurl {} - catch { - set inf [RobotFileOpen unvisited $URL($url,host) $URL($url,path) r] - set fromurl [gets $inf] + set distance -1 + if {[RobotFileExist $task unvisited $URL($task,$url,hostport) $URL($task,$url,path)]} { + set inf [RobotFileOpen $task unvisited $URL($task,$url,hostport) $URL($task,$url,path) r] + RobotReadRecord $inf fromurl distance RobotFileClose $inf } - RobotFileUnlink unvisited $URL($url,host) $URL($url,path) - if {![RobotFileExist bad $URL($url,host) $URL($url,path)]} { - set outf [RobotFileOpen bad $URL($url,host) $URL($url,path)] - puts $outf "URL=$url 404" - puts $outf "Reference $fromurl" + RobotFileUnlink $task unvisited $URL($task,$url,hostport) $URL($task,$url,path) + if {![RobotFileExist $task bad $URL($task,$url,hostport) $URL($task,$url,path)]} { + set outf [RobotFileOpen $task bad $URL($task,$url,hostport) $URL($task,$url,path)] + RobotWriteRecord $outf $fromurl $distance RobotFileClose $outf } - } +} -proc Robot301 {url tourl} { +proc RobotRedirect {task url tourl code} { global URL puts "Redirecting from $url to $tourl" + set distance {} set fromurl {} - catch { - set inf [RobotFileOpen unvisited $URL($url,host) $URL($url,path) r] - set fromurl [gets $inf] + if {[RobotFileExist $task unvisited $URL($task,$url,hostport) $URL($task,$url,path)]} { + set inf [RobotFileOpen $task unvisited $URL($task,$url,hostport) $URL($task,$url,path) r] + RobotReadRecord $inf fromurl distance RobotFileClose $inf } - RobotFileUnlink unvisited $URL($url,host) $URL($url,path) - if {![RobotFileExist bad $URL($url,host) $URL($url,path)]} { - set outf [RobotFileOpen bad $URL($url,host) $URL($url,path)] - puts $outf "URL=$url to $tourl 301" - puts $outf "Reference $fromurl" + if {![RobotFileExist $task bad $URL($task,$url,hostport) $URL($task,$url,path)]} { + set outf [RobotFileOpen $task bad $URL($task,$url,hostport) $URL($task,$url,path)] + RobotWriteRecord $outf $fromurl $distance RobotFileClose $outf } - if {[RobotHref $url tourl host path]} { - if {![RobotFileExist unvisited $host $path]} { - puts "Mark as unvisited" - set outf [RobotFileOpen unvisited $host $path] - puts $outf 301 - RobotFileClose $outf + if {[RobotHref $task $url tourl host path]} { + if {![RobotFileExist $task visited $host $path]} { + if {![RobotFileExist $task unvisited $host $path]} { + set outf [RobotFileOpen $task unvisited $host $path] + RobotWriteRecord $outf $fromurl $distance + RobotFileClose $outf + } + } else { + set olddistance {} + set inf [RobotFileOpen $task visited $host $path r] + 
RobotReadRecord $inf oldurl olddistance + RobotFileClose $inf + if {[string length $olddistance] == 0} { + set olddistance 1000 + } + if {[string length $distance] == 0} { + set distance 1000 + } + puts "distance=$distance olddistance=$olddistance" + if {[expr $distance < $olddistance]} { + set outf [RobotFileOpen $task unvisited $host $path] + RobotWriteRecord $outf $tourl $distance + RobotFileClose $outf + } } } + if {[catch {RobotFileUnlink $task unvisited $URL($task,$url,hostport) $URL($task,$url,path)}]} { + puts "unlink failed" + exit 1 + } } -proc RobotTextHtml {url out} { - global URL +proc wellform {body} { + regsub -all {} $body { } abody + regsub -all -nocase {} $abody {} body + regsub -all {<[^\>]+>} $body {} abody + regsub -all { } $abody { } body + regsub -all {&} $body {&} abody + return $abody +} - set head 0 - htmlSwitch $URL($url,buf) \ - title { - if {!$head} { - headSave $url $out - set head 1 +proc link {task url out href body distance} { + global URL control + if {[expr $distance > $control($task,distance)]} return + + if {![RobotHref $task $url href host path]} return + + puts $out "" + puts $out "$href" + set abody [wellform $body] + puts $out "$abody" + puts $out "" + + if {![RobotFileExist $task visited $host $path]} { + set olddistance 1000 + if {![RobotFileExist $task bad $host $path]} { + if {[RobotFileExist $task unvisited $host $path]} { + set inf [RobotFileOpen $task unvisited $host $path r] + RobotReadRecord $inf oldurl olddistance + RobotFileClose $inf } - puts $out "$body" + } else { + set olddistance 0 + } + if {[string length $olddistance] == 0} { + set olddistance 1000 + } + if {[expr $distance < $olddistance]} { + set outf [RobotFileOpen $task unvisited $host $path] + RobotWriteRecord $outf $url $distance + RobotFileClose $outf + } + } elseif {[string compare $href $url]} { + set inf [RobotFileOpen $task visited $host $path r] + RobotReadRecord $inf xurl olddistance + close $inf + if {[string length $olddistance] == 0} { + set olddistance 1000 + } + if {[expr $distance < $olddistance]} { + puts "OK remarking url=$url href=$href" + puts "olddistance = $olddistance" + puts "newdistance = $distance" + set outf [RobotFileOpen $task unvisited $host $path] + RobotWriteRecord $outf $url $distance + RobotFileClose $outf + } + } +} + +proc RobotTextHtml {task url out} { + global URL control + + # set title so we can emit it for the body + set title {} + # if true, nothing will be indexed + set noindex 0 + # if true, nothing will be followed + set nofollow 0 + + set distance 0 + set fdistance 0 + if {$control($task,distance) < 1000 && [info exists URL($task,$url,dist)]} { + set fdistance $URL($task,$url,dist) + set distance [expr $fdistance + 1] + } + htmlSwitch $URL($task,$url,buf) \ + title { + set title $body } -nonest meta { - if {!$head} { - headSave $url $out - set head 1 - } + # collect metadata and save NAME= CONTENT=.. 
+ set metaname {} + set metacontent {} puts -nonewline $out "" + # go through robots directives (af any) + if {![string compare $metaname robots]} { + set direcs [split [string tolower $metacontent] ,] + if {[lsearch $direcs noindex] >= 0} { + set noindex 1 + } + if {[lsearch $direcs nofollow] >= 0} { + set nofollow 1 + } } - puts $out {>} } body { - regsub -all -nocase {} $body {} abody - regsub -all {<[^\>]+>} $abody {} nbody - puts $out "" - puts $out $nbody - puts $out "" + # don't print title of document content if noindex is used + if {!$noindex} { + puts $out "$title" + set bbody [wellform $body] + puts $out "" + puts $out $bbody + puts $out "" + } + } -nonest base { + # + if {![info exists parm(href)]} { + continue + } + set href [string trim $parm(href)] + if {![RobotHref $task $url href host path]} continue + set URL($task,$url,bpath) $path } a { + # .. + # we're not using nonest - otherwise body isn't set + if {$nofollow} continue + if {![info exists parm(href)]} { + continue + } + link $task $url $out [string trim $parm(href)] $body $distance + } -nonest area { + if {$nofollow} continue if {![info exists parm(href)]} { - puts "no href" continue } - if {!$head} { - headSave $url $out - set head 1 + link $task $url $out [string trim $parm(href)] $body $distance + } -nonest frame { + if {![info exists parm(src)]} { + continue } - if {1} { - set href $parm(href) - if {![RobotHref $url href host path]} continue - - puts $out "" - puts $out "$href" - puts $out "$body" - puts $out "" - - if {![RobotFileExist visited $host $path]} { - if {![RobotFileExist bad $host $path]} { - if {[catch {set outf [RobotFileOpen unvisited $host $path]} msg]} { - puts "--- Error $msg" - exit 1 - } - puts $outf $url - RobotFileClose $outf + link $task $url $out [string trim $parm(src)] $body $fdistance + } +} + +proc RobotsTxt {task url} { + global agent URL + + RobotsTxt0 $task URL(URL($task,$url,hostport),robots) $URL($task,$url,buf) +} + +proc RobotsTxt0 {task v buf} { + global URL agent + set section 0 + foreach l [split $buf \n] { + if {[regexp {([-A-Za-z]+):[ ]*([^\# ]+)} $l match cmd arg]} { + set arg [string trim $arg] + puts "cmd=$cmd arg=$arg" + switch -- [string tolower $cmd] { + user-agent { + if {$section} break + set pat [string tolower $arg]* + set section [string match $pat $agent] + } + disallow { + if {$section} { + puts "rule [list 0 $arg]" + lappend $v [list 0 $arg] + } + } + allow { + if {$section} { + puts "rule [list 1 $arg]" + lappend $v [list 1 $arg] } } } } - if {!$head} { - headSave $url $out - set head 1 } - puts $out "" } -proc RobotTextPlain {url out} { +proc RobotTextPlain {task url out} { global URL - headSave $url $out puts $out "" - puts $out $URL($url,buf) + regsub -all {<} $URL($task,$url,buf) {\<} content + puts $out $content puts $out "" - puts $out "" + + if {![string compare $URL($task,$url,path) /robots.txt]} { + RobotsTxt $task $url + } } -proc Robot200 {url} { - global URL domains - - puts "Parsing $url" - set out [RobotFileOpen visited $URL($url,host) $URL($url,path)] - switch $URL($url,head,content-type) { - text/html { - RobotTextHtml $url $out - } - text/plain { - RobotTextPlain $url $out - } - default { - headSave $url $out - puts $out "" - } +proc RobotWriteMetadata {task url out} { + global URL + + set charset $URL($task,$url,charset) + puts $out "" + puts $out "" + + set distance 1000 + if {[RobotFileExist $task unvisited $URL($task,$url,hostport) $URL($task,$url,path)]} { + set inf [RobotFileOpen $task unvisited $URL($task,$url,hostport) 
$URL($task,$url,path) r] + RobotReadRecord $inf fromurl distance + RobotFileClose $inf } + set URL($task,$url,dist) $distance + puts $out "" + puts $out " $distance" + puts $out "" + headSave $task $url $out + puts "Parsing $url distance=$distance" + switch $URL($task,$url,head,content-type) { + text/html { + if {[string length $distance]} { + RobotTextHtml $task $url $out + } + } + text/plain { + RobotTextPlain $task $url $out + } + } + puts $out "" +} + +proc Robot200 {task url} { + global URL + + set out [RobotFileOpen $task raw $URL($task,$url,hostport) $URL($task,$url,path)] + puts -nonewline $out $URL($task,$url,buf) RobotFileClose $out - # puts "Parsing done" - RobotFileUnlink unvisited $URL($url,host) $URL($url,path) + + set out [RobotFileOpen $task visited $URL($task,$url,hostport) $URL($task,$url,path)] + RobotWriteMetadata $task $url $out + RobotFileClose $out + + RobotFileUnlink $task unvisited $URL($task,$url,hostport) $URL($task,$url,path) } -proc RobotReadContent {url sock} { +proc RobotReadContent {task url sock binary} { global URL set buffer [read $sock 16384] set readCount [string length $buffer] - + if {$readCount <= 0} { - close $sock - Robot200 $url - RobotRestart + Robot200 $task $url + RobotRestart $task $url $sock + } elseif {!$binary && [string first \0 $buffer] >= 0} { + Robot200 $task $url + RobotRestart $task $url $sock } else { # puts "Got $readCount bytes" - set URL($url,buf) $URL($url,buf)$buffer + set URL($task,$url,buf) $URL($task,$url,buf)$buffer } } -proc RobotReadHeader {url sock} { - global URL +proc RobotReadHeader {task url sock} { + global URL debuglevel - set buffer [read $sock 2148] + if {$debuglevel > 1} { + puts "HTTP head $url" + } + if {[catch {set buffer [read $sock 2148]}]} { + RobotError $task $url 404 + RobotRestart $task $url $sock + return + } set readCount [string length $buffer] if {$readCount <= 0} { - Robot404 $url - close $sock - RobotRestart + RobotError $task $url 404 + RobotRestart $task $url $sock } else { # puts "Got $readCount bytes" - set URL($url,buf) $URL($url,buf)$buffer + set URL($task,$url,buf) $URL($task,$url,buf)$buffer - set n [string first \n\n $URL($url,buf)] + set n [string first \r\n\r\n $URL($task,$url,buf)] if {$n > 1} { set code 0 set version {} - set headbuf [string range $URL($url,buf) 0 $n] - incr n - incr n - set URL($url,buf) [string range $URL($url,buf) $n end] + set headbuf [string range $URL($task,$url,buf) 0 $n] + incr n 4 + set URL($task,$url,charset) ISO-8859-1 + set URL($task,$url,buf) [string range $URL($task,$url,buf) $n end] regexp {^HTTP/([0-9.]+)[ ]+([0-9]+)} $headbuf x version code set lines [split $headbuf \n] foreach line $lines { - if {[regexp {^([^:]+):[ ]+(.*)} $line x name value]} { - set URL($url,head,[string tolower $name]) $value + if {[regexp {^([^:]+):[ ]+([^;]*)} $line x name value]} { + set URL($task,$url,head,[string tolower $name]) [string trim $value] } + regexp {^Content-Type:.*charset=([A-Za-z0-9_-]*)} $line x URL($task,$url,charset) } - puts "code = $code" - set URL($url,state) skip + puts "HTTP CODE $code" + set URL($task,$url,state) skip switch $code { 301 { - Robot301 $url $URL($url,head,location) - close $sock - RobotRestart + RobotRedirect $task $url $URL($task,$url,head,location) 301 + RobotRestart $task $url $sock } 302 { - Robot301 $url $URL($url,head,location) - close $sock - RobotRestart - } - 404 { - Robot404 $url - close $sock - RobotRestart - } - 401 { - Robot401 $url - close $sock - RobotRestart + RobotRedirect $task $url $URL($task,$url,head,location) 302 + 
RobotRestart $task $url $sock } 200 { - if {![info exists URL($url,head,content-type)]} { - set URL($url,head,content-type) {} + if {![info exists URL($task,$url,head,content-type)]} { + set URL($task,$url,head,content-type) {} } - switch $URL($url,head,content-type) { - text/html { - fileevent $sock readable [list RobotReadContent $url $sock] - } - text/plain { - fileevent $sock readable [list RobotReadContent $url $sock] - } - default { - close $sock - Robot200 $url - RobotRestart + set binary 1 + switch -glob -- $URL($task,$url,head,content-type) { + text/* { + set binary 0 } } + if {![regexp {/robots.txt$} $url]} { + if {![checkrule $task mime $URL($task,$url,head,content-type)]} { + RobotError $task $url mimedeny + RobotRestart $task $url $sock + return + } + } + fileevent $sock readable [list RobotReadContent $task $url $sock $binary] } default { - Robot404 $url - close $sock - RobotRestart + RobotError $task $url $code + RobotRestart $task $url $sock } } } } } -proc RobotConnect {url sock} { - global URL agent +proc RobotSockCancel {task url sock} { + + puts "RobotSockCancel sock=$sock url=$url" + RobotError $task $url 401 + RobotRestart $task $url $sock +} - fconfigure $sock -translation {auto crlf} -blocking 0 - puts "Reading $url" - fileevent $sock readable [list RobotReadHeader $url $sock] - puts $sock "GET $URL($url,path) HTTP/1.0" - puts $sock "Host: $URL($url,host)" +proc RobotConnect {task url sock} { + global URL agent acceptLanguage + + fconfigure $sock -translation {lf crlf} -blocking 0 + fileevent $sock readable [list RobotReadHeader $task $url $sock] + puts $sock "GET $URL($task,$url,path) HTTP/1.0" + puts $sock "Host: $URL($task,$url,host)" puts $sock "User-Agent: $agent" + if {[string length $acceptLanguage]} { + puts $sock "Accept-Language: $acceptLanguage" + } puts $sock "" - flush $sock + set URL($sock,cancel) [after 30000 [list RobotSockCancel $task $url $sock]] + if {[catch {flush $sock}]} { + RobotError $task $url 404 + RobotRestart $task $url $sock + } } proc RobotNop {} { } -proc RobotGetUrl {url phost} { - global URL - puts "---------" - puts $url - if {![regexp {([^:]+)://([^/]+)([^ ]*)} $url x method hostport path]} { +proc RobotGetUrl {task url phost} { + global URL robotsRunning + flush stdout + puts "Retrieve running=$robotsRunning url=$url task=$task" + if {![regexp {([^:]+)://([^/]+)(.*)} $url x method hostport path]} { return -1 } if {![regexp {([^:]+):([0-9]+)} $hostport x host port]} { set port 80 set host $hostport } - set URL($url,method) $method - set URL($url,host) $host - set URL($url,port) $port - set URL($url,path) $path - set URL($url,state) head - set URL($url,buf) {} + set URL($task,$url,method) $method + set URL($task,$url,host) $host + set URL($task,$url,hostport) $hostport + set URL($task,$url,path) $path + set URL($task,$url,state) head + set URL($task,$url,buf) {} + + if {[string compare $path /robots.txt]} { + set ok 1 + if {![info exists URL($hostport,robots)]} { + puts "READING robots.txt for host $hostport" + if {[RobotFileExist $task visited $hostport /robots.txt]} { + set inf [RobotFileOpen $task visited $hostport /robots.txt r] + set buf [read $inf 32768] + close $inf + } else { + set buf "User-agent: *\nAllow: /\n" + } + RobotsTxt0 $task URL($hostport,robots) $buf + } + if {[info exists URL($hostport,robots)]} { + foreach l $URL($hostport,robots) { + if {[string first [lindex $l 1] $path] == 0} { + set ok [lindex $l 0] + break + } + } + } + if {!$ok} { + puts "skipped due to robots.txt" + return -1 + } + } if [catch {set sock 
[socket -async $host $port]}] { return -1 } - RobotConnect $url $sock + RobotConnect $task $url $sock return 0 } @@ -530,57 +1037,267 @@ if {![llength [info commands htmlSwitch]]} { } } - -set agent "zmbot/0.0" +set agent "zmbot/0.2" if {![catch {set os [exec uname -s -r]}]} { set agent "$agent ($os)" - puts "agent: $agent" } +puts "agent: $agent" + proc bgerror {m} { + global errorInfo puts "BGERROR $m" + puts $errorInfo } -if {0} { - proc RobotRestart {} { - global robotMoreWork - set robotMoreWork 0 - puts "myrestart" +set robotsRunning 0 +set workdir [pwd] +set idletime 30000 +set acceptLanguage {} +set debuglevel 0 + +# Rules: allow, deny, url + +proc checkrule {task type this} { + global control + global debuglevel + + set default_ret 1 + + if {$debuglevel > 3} { + puts "CHECKRULE $type $this" } - set robotMoreWork 1 - set url {http://www.indexdata.dk/zap/} - RobotGetUrl $url {} - while {$robotMoreWork} { - vwait robotMoreWork + if {[info exist control($task,alrules)]} { + foreach l $control($task,alrules) { + if {$debuglevel > 3} { + puts "consider $l" + } + # consider type + if {[lindex $l 1] != $type} continue + # consider mask (! negates) + set masks [lindex $l 2] + set ok 0 + set default_ret 0 + foreach mask $masks { + if {$debuglevel > 4} { + puts "consider single mask $mask" + } + if {[string index $mask 0] == "!"} { + set mask [string range $mask 1 end] + if {[string match $mask $this]} continue + } else { + if {![string match $mask $this]} continue + } + set ok 1 + } + if {$debuglevel > 4} { + puts "ok = $ok" + } + if {!$ok} continue + # OK, we have a match + if {[lindex $l 0] == "allow"} { + if {$debuglevel > 3} { + puts "CHECKRULE MATCH OK" + } + return 1 + } else { + if {$debuglevel > 3} { + puts "CHECKFULE MATCH FAIL" + } + return 0 + } + } } - puts "-----------" - puts $URL($url,buf) - puts "-----------" - exit 1 + if {$debuglevel > 3} { + puts "CHECKRULE MATCH DEFAULT $default_ret" + } + return $default_ret } -set robotMoreWork 0 -set workdir [pwd] -if {[llength $argv] < 2} { - puts "Tclrobot: usage " - puts " Example: '*.indexdata.dk' http://www.indexdata.dk/" +proc url {href} { + global debuglevel task + + if {[RobotHref $task http://www.indexdata.dk/ href host path]} { + if {![RobotFileExist $task visited $host $path]} { + set outf [RobotFileOpen $task unvisited $host $path] + RobotWriteRecord $outf href 0 + RobotFileClose $outf + } + } +} + +proc deny {type stuff} { + global control task + + lappend control($task,alrules) [list deny $type $stuff] +} + +proc allow {type stuff} { + global control task + + lappend control($task,alrules) [list allow $type $stuff] +} + +proc debug {level} { + global debuglevel + + set debuglevel $level +} + +proc task {t} { + global tasks task status control + + set task $t + + if {[info exists tasks]} { + if {[lsearch -exact $tasks $t] >= 0} { + return 0 + } + } + + lappend tasks $t + set status($t,unvisited) 0 + set status($t,visited) 0 + set status($t,bad) 0 + set status($t,raw) 0 + set status($t,active) 1 + set control($t,seq) 0 + set control($t,distance) 10 + return 1 +} + +# Little utility that ensures that at least one task is present (main). 
+proc chktask {} { + global tasks + if {![info exist tasks]} { + task main + } +} + + +# Parse options + +set i 0 +set l [llength $argv] + +if {$l < 2} { + puts {tclrobot: usage:} + puts {tclrobot [-j jobs] [-i idle] [-c count] [-d domain] [-D dir] [-r rules] [url ..]} + puts " Example: -c 3 -d '*.dk' http://www.indexdata.dk/" + exit 1 } -set domains [lindex $argv 0] -set site [lindex $argv 1] -if {[string length $site]} { - set robotMoreWork 1 - if [RobotGetUrl $site {}] { - set robotMoreWork 0 - puts "Couldn't process $site" - } else { - #set x [RobotFileOpen unvisited $site /robots.txt] - #RobotFileClose $x + +while {$i < $l} { + set arg [lindex $argv $i] + switch -glob -- $arg { + -t* { + set t [string range $arg 2 end] + if {![string length $t]} { + set t [lindex $argv [incr i]] + } + task $t + } + -D* { + set dir [string range $arg 2 end] + if {![string length $dir]} { + set dir [lindex $argv [incr i]] + } + lappend daemon_dir $dir + } + -j* { + set robotsMax [string range $arg 2 end] + if {![string length $robotsMax]} { + set robotsMax [lindex $argv [incr i]] + } + } + -c* { + chktask + set control($task,distance) [string range $arg 2 end] + if {![string length $control($task,distance)]} { + set control($task,distance) [lindex $argv [incr i]] + } + } + -d* { + chktask + set dom [string range $arg 2 end] + if {![string length $dom]} { + set dom [lindex $argv [incr i]] + } + lappend control($task,domains) $dom + } + -i* { + set idletime [string range $arg 2 end] + if {![string length $idletime]} { + set idletime [lindex $argv [incr i]] + } + } + -l* { + chktask + set acceptLanguage [string range $arg 2 end] + if {![string length $acceptLanguage]} { + set acceptLanguage [lindex $argv [incr i]] + } + } + -r* { + chktask + set rfile [string range $arg 2 end] + if {![string length $rfile]} { + set rfile [lindex $argv [incr i]] + } + catch {unset maxdistance} + source $rfile + if {[info exists maxdistance]} { + set control($task,distance) $maxdistance + } + } + default { + chktask + set href $arg + if {[RobotHref $task http://www.indexdata.dk/ href host path]} { + if {![RobotFileExist $task visited $host $path]} { + set outf [RobotFileOpen $task unvisited $host $path] + RobotWriteRecord $outf href 0 + RobotFileClose $outf + } + } + } } + incr i } -while {$robotMoreWork} { - vwait robotMoreWork +if {![info exist robotsMax]} { + set robotsMax 5 } + +if {[info exist daemon_dir]} { + RobotDaemonLoop +} else { + foreach t $tasks { + puts "task $t" + puts "max distance=$control($t,distance)" + if {[info exists control($t,domains)]} { + puts "domains=$control($t,domains)" + } + } + puts "max jobs=$robotsMax" + + foreach t $tasks { + RobotStart $t + } + + while {$robotsRunning} { + vwait robotsRunning + } + + if {[info exists tasks]} { + foreach t $tasks { + set statusfile [open $t/status w] + puts $statusfile "$status($t,unvisited) $status($t,bad) $status($t,visited)" + close $statusfile + } + } +} +
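
For reference, the -r option parsed above sources a Tcl rules file before crawling starts. The sketch below shows what such a file might look like; the job name and the specific masks are made-up examples, while the task, debug, allow, deny and url commands and the maxdistance variable are the ones defined in this revision (checkrule applies the first matching allow/deny rule of a given type, so narrower deny rules should come before a broad allow).

    # example-rules.tcl -- hypothetical rules file, run as: tclsh robot.tcl -j 3 -r example-rules.tcl
    task myjob                            ;# hypothetical task name; a default task "main" is created if omitted
    debug 1                               ;# raise log verbosity
    set maxdistance 3                     ;# copied into control($task,distance) after the file is sourced
    deny  url {*.pdf *.ps}                ;# skip links whose URL matches these globs
    deny  mime image/*                    ;# skip fetched content with these MIME types
    allow url http://www.indexdata.dk/*   ;# everything else under this prefix is allowed
    url http://www.indexdata.dk/          ;# seed URL written to the unvisited queue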