@@ -15,16 +15,24 @@ import (
 // SimpleTracker accepts url and xpath to extract content
 // and returns content/error message, ok
 func SimpleTracker(url, xpath *string) (content string, ok bool) {
+	defer func() {
+		if !ok {
+			log.Println(content)
+			return
+		}
+		log.Println("INFO: Found", content, "from", *url)
+	}()
+
 	xpExec, err := xmlpath.Compile(*xpath)
 	if err != nil {
-		log.Printf("ERROR: failed to compile xpath %s", *xpath)
+		content = "ERROR: failed to compile xpath " + *xpath
 		ok = false
 		return
 	}
 
 	resp, getErr := http.Get(*url)
 	if getErr != nil {
-		log.Println("ERROR: failed to fetch the website")
+		content = "ERROR: failed to fetch the website"
 		ok = false
 		return
 	}
@@ -36,19 +44,15 @@ func SimpleTracker(url, xpath *string) (content string, ok bool) {
 		xmlRoot, xmlErr := xmlpath.ParseHTML(reader)
 		if xmlErr != nil {
 			content = "ERROR: parse xml error: " + xmlErr.Error()
-			log.Println(content)
 			ok = false
 			return
 		}
-		value, found := xpExec.String(xmlRoot)
-		if !found {
-			ok = false
+		content, ok = xpExec.String(xmlRoot)
+		content = strings.TrimSpace(content)
+		if !ok {
 			content = "value not found"
 			return
 		}
-		log.Println("INFO: Found", value, "from", *url)
-		content = value
-		ok = true
 	}
 
 	// step 1. read directly from body
@@ -59,14 +63,12 @@ func SimpleTracker(url, xpath *string) (content string, ok bool) {
 		root, err := html.Parse(bytes.NewReader(body))
 		if err != nil {
 			content = "ERROR: parse html" + err.Error()
-			log.Println(content)
 			return
 		}
		var b bytes.Buffer
 		html.Render(&b, root)
 		extractHelper(bytes.NewReader(b.Bytes()))
 	}
 
-	strings.TrimSpace(content)
 	return
 }
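
For context, a minimal caller sketch illustrating how the reworked return values behave after this change: errors now come back through content/ok rather than being logged inline, and the new deferred func inside SimpleTracker handles all logging. This is not part of the commit; it assumes SimpleTracker lives in package main, and the flag names, URL, and xpath below are made up for illustration.

package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	// Example values only; not taken from the commit.
	url := flag.String("url", "https://example.com", "page to fetch")
	xpath := flag.String("xpath", "//title", "xpath expression to extract")
	flag.Parse()

	content, ok := SimpleTracker(url, xpath)
	if !ok {
		// After this change, content carries the error message on failure.
		fmt.Fprintln(os.Stderr, content)
		os.Exit(1)
	}
	fmt.Println(content)
}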