M autocompare/main.go +9 -4
@@ 31,7 31,7 @@ var nthreads int
var threshold uint
var nrmsdThresh float64
var nrmsdOnly bool
-var totalPages int
+var totalPages, totalFails int
var version string
@@ 62,7 62,7 @@ func main() {
MB := uint64(1024)
for {
runtime.ReadMemStats(mem)
- log.Printf("memory %d %d - %d", mem.Alloc / MB, mem.Sys / MB, totalPages)
+ log.Printf("memory %d %d - %d %d", mem.Alloc / MB, mem.Sys / MB, totalPages, totalFails)
time.Sleep(10 * time.Second)
}
}()
@@ 136,10 136,13 @@ func startLogger(pages *sync.WaitGroup)
resultMessage = fmt.Sprintf("OK %s", page.Message())
case FAIL, PANIC:
resultMessage = fmt.Sprintf("FAIL %s", page.Message())
+ totalFails++
case NEW:
-			resultMessage = fmt.Sprintf("ERROR shouldn't be getting NEW")
+			resultMessage = "ERROR shouldn't be getting NEW"
+ totalFails++
default:
-			resultMessage = fmt.Sprint("ERROR unexpected %d", page.State())
+			resultMessage = fmt.Sprintf("ERROR unexpected %d", page.State())
+ totalFails++
}
fmt.Printf("%s,%s,%s,%s,%s,%s,%d,%s,%t\n", page.Id(), page.AdID(), page.Subdomain(), page.Domain(), page.OriginURL(), page.CopyURL(), page.Attempts(), resultMessage, page.Links())
totalPages++
@@ 225,10 228,12 @@ func startComparisons(pageStream chan Pa
if erra != nil || errb != nil {
page.SetState(PANIC)
if erra != nil {
- page.SetMessage("Unable to read " + page.OriginImage())
+ log.Print(erra)
+ page.SetMessage("Unable to read origin " + page.OriginImage())
}
if errb != nil {
- page.SetMessage("Unable to read " + page.CopyImage())
+ log.Print(errb)
+ page.SetMessage("Unable to read copy " + page.CopyImage())
}
processStream <- page
} else {
R cdr.go => +0 -56
@@ 1,56 0,0 @@
-package autocompare
-
-import (
- "encoding/csv"
- "fmt"
- "io"
- "log"
-)
-
-type CDR struct {
- BasePage
-}
-
-func (p CDR) OriginURL() string {
- return fmt.Sprintf("http://%s.ybsites2.com%s", p.subdomain, p.origPath)
-}
-
-func (p CDR) CopyURL() string {
- return fmt.Sprintf("http://%s.ybsitecenter.com%s", p.subdomain, p.path)
-}
-
-type CDRSites struct {
- *csv.Reader
- *SitesBase
-}
-
-func (s *CDRSites) CheckLinks() bool {
- return true
-}
-
-func (s *CDRSites) NextPage() Page {
- line, err := s.Read()
- if err == io.EOF {
- return nil
- }
- var state int
- var message string
- if len(line) < 12 {
- if len(line) < 11 {
- log.Printf("bad input data: %v", line)
- return s.NextPage()
- } else {
- s.counts[line[5]]++
- state = PANIC
- message = "MIGRATION " + line[5]
- }
- } else if line[5] == "1" && line[11] != "" {
- s.processed++
- state = NEW
- } else {
- s.counts[line[5]]++
- state = PANIC
- message = "MIGRATION " + line[5]
- }
- return &CDR{BasePage{attempts: 0, subdomain: line[3], domain1: line[4], origPath: line[7], path: line[8], adID: line[6], origImg: "", copyImg: "", state: state, id: line[2], message: message}}
-}
M fetchpage.go +4 -1
@@ 80,7 80,9 @@ func fetchPage(phantomUrl string, timeou
fname := fmt.Sprintf("autocompare%x.png", h.Sum(nil))
outFileName := filepath.Join(os.TempDir(), fname)
- resp, err := http.PostForm(phantomUrl, url.Values{"output": {outFileName}, "address": {src}, "timeout": {timeout}})
+ vals := url.Values{"output": {outFileName}, "address": {src}, "timeout": {timeout}}
+ //log.Printf("sending %v", vals)
+ resp, err := http.PostForm(phantomUrl, vals)
if resp != nil && resp.Body != nil {
defer resp.Body.Close()
}
@@ 99,6 101,7 @@ func fetchPage(phantomUrl string, timeou
log.Printf("ERROR! bad server response %s %s %s\n", resp.Status, string(bdy), src)
results <- &Result{"", false, true}
} else {
+ //log.Printf("got back 200 success for %s", outFileName)
results <- &Result{outFileName, true, true}
}
}
M foundation.go +1 -14
@@ 2,22 2,9 @@ package autocompare
import (
"encoding/csv"
- "fmt"
"io"
)
-type Foundation struct {
- BasePage
-}
-
-func (p Foundation) OriginURL() string {
- return fmt.Sprintf("http://%s.ybsites.com", p.subdomain)
-}
-
-func (p Foundation) CopyURL() string {
- return fmt.Sprintf("http://%s.ybsitecenter.com", p.subdomain)
-}
-
type FoundationSites struct {
*csv.Reader
*SitesBase
@@ 42,5 29,5 @@ func (s *FoundationSites) NextPage() Pag
state = PANIC
message = "MIGRATION broken"
}
- return &Foundation{BasePage{attempts: 0, subdomain: line[2], domain1: line[7], adID: line[6], state: state, message: message, id: line[1]}}
+ return &BasePage{attempts: 0, subdomain: line[2], domain1: line[7], adID: line[6], state: state, message: message, id: line[1], baseURL: s.baseURL, uat: s.uat}
}
R kop.go => +0 -57
@@ 1,57 0,0 @@
-package autocompare
-
-import (
- "encoding/csv"
- "fmt"
- "io"
-)
-
-type KOP struct {
- BasePage
-}
-
-func (p KOP) OriginURL() string {
- return fmt.Sprintf("http://%s.ybsites.com%s", p.subdomain, p.origPath)
-}
-
-func (p KOP) CopyURL() string {
- return fmt.Sprintf("http://%s.ybsitecenter.com%s", p.subdomain, p.path)
-}
-
-type KOPSites struct {
- *csv.Reader
- *SitesBase
-}
-
-func (s *KOPSites) CheckLinks() bool {
- return true
-}
-
-func (s *KOPSites) NextPage() Page {
- line, err := s.Read()
- if err == io.EOF {
- return nil
- }
- var state int
- var message string
- if len(line) < 12 {
- if len(line) < 11 {
- log.Printf("bad input data: %v", line)
- return s.NextPage()
- } else {
- s.counts[line[5]]++
- state = PANIC
- message = "MIGRATION " + line[5]
- }
- }
- if line[5] == "1" && line[11] != "" {
- s.processed++
- state = NEW
- } else {
- s.counts[line[5]]++
- state = PANIC
- message = "MIGRATION " + line[5]
- }
- return &KOP{BasePage{attempts: 0, subdomain: line[3], domain1: line[4], origPath: line[7], path: line[8], adID: line[6], origImg: "", copyImg: "", state: state, message: message, id: line[2]}}
-}
-// vim:ts=4:sw=4:noet
M loader.go +11 -8
@@ 16,6 16,8 @@ type Sites interface {
type SitesBase struct {
processed int
counts map[string]int
+ baseURL string
+ uat bool
}
func (s SitesBase) Processed() int { return s.processed }
@@ 47,20 49,21 @@ func NewSites(fileName string) Sites {
reader.TrailingComma = true
reader.Read() // Get rid of header
var sites Sites
- base := SitesBase{0, make(map[string]int)}
+ base := SitesBase{0, make(map[string]int), "", false}
switch source {
case "cdr":
- sites = &CDRSites{reader, &base}
+ base.baseURL = "http://%s.ybsites2.com%s"
case "kop":
- if sourceIdx == 0 {
- sites = &FoundationSites{reader, &base}
- } else {
- sites = &KOPSites{reader, &base}
- }
+ base.baseURL = "http://%s.ybsites.com%s"
	case "uk":
-		sites = &UKSites{reader, &base}
+		base.baseURL = "http://%s.yellsites.co.uk%s"
+		base.uat = true
default:
log.Fatalf("Unknown source %s", source)
}
+ if sourceIdx == 0 {
+ sites = &FoundationSites{reader, &base}
+ } else {
+ sites = &MultiSites{reader, &base}
+ }
return sites
}
A => multisites.go +43 -0
@@ 0,0 1,43 @@
+package autocompare
+
+import (
+ "encoding/csv"
+ "io"
+ "log"
+)
+
+type MultiSites struct {
+ *csv.Reader
+ *SitesBase
+}
+
+func (s *MultiSites) CheckLinks() bool {
+ return true
+}
+
+func (s *MultiSites) NextPage() Page {
+ line, err := s.Read()
+ if err == io.EOF {
+ return nil
+ }
+ var state int
+ var message string
+ if len(line) < 12 {
+ if len(line) < 11 {
+ log.Printf("bad input data: %v", line)
+ return s.NextPage()
+ } else {
+ s.counts[line[5]]++
+ state = PANIC
+ message = "MIGRATION " + line[5]
+ }
+ } else if line[5] == "1" && line[11] != "" {
+ s.processed++
+ state = NEW
+ } else {
+ s.counts[line[5]]++
+ state = PANIC
+ message = "MIGRATION " + line[5]
+ }
+ return &BasePage{attempts: 0, subdomain: line[3], domain1: line[4], origPath: line[7], path: line[8], adID: line[6], origImg: "", copyImg: "", state: state, id: line[2], message: message, baseURL: s.baseURL, uat: s.uat}
+}
M page.go +15 -0
@@ 1,5 1,7 @@
package autocompare
+import "fmt"
+
const (
SUCCESS = iota
FAIL
@@ 41,6 43,8 @@ type BasePage struct {
message string
id string
links bool
+ baseURL string
+ uat bool
}
func (p BasePage) Subdomain() string { return p.subdomain }
@@ 74,3 78,14 @@ func (p *BasePage) SetMessage(s string)
func (p BasePage) Links() bool { return p.links }
func (p *BasePage) SetLinks(s bool) { p.links = s }
+
+func (p BasePage) OriginURL() string {
+ return fmt.Sprintf(p.baseURL, p.subdomain, p.origPath)
+}
+
+func (p BasePage) CopyURL() string {
+ if p.uat {
+ return fmt.Sprintf("http://%s.fwuate2.yb.int%s", p.subdomain, p.path)
+ }
+ return fmt.Sprintf("http://%s.ybsitecenter.com%s", p.subdomain, p.path)
+}
M scripts/memuse +4 -3
@@ 13,7 13,7 @@ fi
BASE=$1
DAT=$BASE/mem.dat
-awk '/memory/ {print $1" "$2","$4","$5","$7}' < $BASE/errs.txt > $DAT
+awk '/^[0-9]+\/[0-9]+\/[0-9]+ [0-9]+:[0-9]+:[0-9]+ memory/ {print $1" "$2","$4","$5","$7}' < $BASE/errs.txt > $DAT
gnuplot <<EOF
set datafile separator ","
@@ 25,11 25,12 @@ set xlabel "Time"
set xdata time
set timefmt "%Y/%m/%d %H:%M:%S"
set y2tics
+set key left
set xtics format "%H:%M:%S"
-plot '${DAT}' using 1:2 title "Alloc" with points, \
+plot '${DAT}' using 1:2 title "Alloc" with dots, \
'${DAT}' using 1:2 title "Alloc smoothed" smooth bezier with lines, \
'${DAT}' using 1:3 title "System" with lines, \
- '${DAT}' using 1:4 title "Comparisons" smooth bezier with lines axis x1y2
+ '${DAT}' using 1:4 title "Comparisons" with lines axis x1y2
EOF
if [[ $GZIP -eq 1 ]]; then
M scripts/postprocess +1 -1
@@ 41,4 41,4 @@ egrep -e 'OK.*true$' ${PREV}/sites.csv |
${MYPATH}/memuse.plot ${PREV}
cp ${MYPATH}/../run_readme.txt ${PREV}/README.txt
zip -r ${PREV}.zip ${PREV}
-( cat ${PREV}/stats.txt ; uuencode ${PREV}.zip ${PREV}.zip ) | mail -s "Autocompare results ${PREV}" erick.moeller@hibu.com Tommy.Jorgensen@kapowsoftware.com todd.little@hibu.com sean.russell@hibu.com
+# ( cat ${PREV}/stats.txt ; uuencode ${PREV}.zip ${PREV}.zip ) | mail -s "Autocompare results ${PREV}" erick.moeller@hibu.com Tommy.Jorgensen@kapowsoftware.com todd.little@hibu.com sean.russell@hibu.com
M scripts/progress +0 -1
@@ 29,6 29,5 @@ while true; do
ETA=`date -d "$TIMEREMAIN seconds" +%H:%M:%S`
printf "Remaining time est.: %02d:%02d:%02d (%s)\n" $(($TIMEREMAIN/3600)) $(($TIMEREMAIN%3600/60)) $(($TIMEREMAIN%60)) $ETA
${D}/memuse -u $BASE
- cp ${BASE}/memuse.svg ~/public_html/memuse.svg
sleep 30
done
R uk.go => +0 -47
@@ 1,47 0,0 @@
-package autocompare
-
-import (
- "encoding/csv"
- "fmt"
- "io"
-)
-
-type UK struct {
- BasePage
-}
-
-func (p UK) OriginURL() string {
- return fmt.Sprintf("http://%s.yellsites.co.uk%s", p.subdomain, p.origPath)
-}
-
-func (p UK) CopyURL() string {
- //return fmt.Sprintf("http://%s.ybsitecenter.com%s", p.subdomain, p.path)
- return fmt.Sprintf("http://%s.fwuate2.yb.int%s", p.subdomain, p.path)
-}
-
-type UKSites struct {
- *csv.Reader
- *SitesBase
-}
-
-func (s *UKSites) CheckLinks() bool {
- return true
-}
-
-func (s *UKSites) NextPage() Page {
- line, err := s.Read()
- if err == io.EOF {
- return nil
- }
- var state int
- var message string
- if line[5] == "1" && line[11] != "" {
- s.processed++
- state = NEW
- } else {
- s.counts[line[5]]++
- state = PANIC
- message = "MIGRATION " + line[5]
- }
- return &UK{BasePage{attempts: 0, subdomain: line[3], domain1: line[4], origPath: line[7], path: line[8], adID: line[6], origImg: "", copyImg: "", state: state, message: message, id: line[2]}}
-}