[TASK] improve logging

Martin/Geno 2019-01-17 13:26:16 +01:00 committed by genofire
parent abae92bb5a
commit 27fde7cd8c
20 changed files with 152 additions and 72 deletions

View File

@ -5,11 +5,12 @@ import (
"io/ioutil"
"os"
"github.com/naoina/toml"
"github.com/FreifunkBremen/yanic/database"
"github.com/FreifunkBremen/yanic/respond"
"github.com/FreifunkBremen/yanic/runtime"
"github.com/FreifunkBremen/yanic/webserver"
"github.com/naoina/toml"
)
// Config represents the whole configuration

View File

@ -1,12 +1,12 @@
package cmd
import (
"log"
"github.com/bdlm/log"
"github.com/spf13/cobra"
allDatabase "github.com/FreifunkBremen/yanic/database/all"
"github.com/FreifunkBremen/yanic/rrd"
"github.com/FreifunkBremen/yanic/runtime"
"github.com/spf13/cobra"
)
// importCmd represents the import command
@ -23,11 +23,11 @@ var importCmd = &cobra.Command{
err := allDatabase.Start(config.Database)
if err != nil {
panic(err)
log.Panicf("could not connect to database: %s", err)
}
defer allDatabase.Close()
log.Println("importing RRD from", path)
log.Infof("importing RRD from %s", path)
for ds := range rrd.Read(path) {
allDatabase.Conn.InsertGlobals(

View File

@ -2,14 +2,16 @@ package cmd
import (
"encoding/json"
"log"
"fmt"
"net"
"strings"
"time"
"github.com/bdlm/log"
"github.com/spf13/cobra"
"github.com/FreifunkBremen/yanic/respond"
"github.com/FreifunkBremen/yanic/runtime"
"github.com/spf13/cobra"
)
var (
@ -28,7 +30,10 @@ var queryCmd = &cobra.Command{
ifaces := strings.Split(args[0], ",")
dstAddress := net.ParseIP(args[1])
log.Printf("Sending request address=%s ifaces=%s", dstAddress, ifaces)
log.WithFields(map[string]interface{}{
"address": dstAddress,
"ifaces": ifaces,
}).Info("sending request")
var ifacesConfigs []respond.InterfaceConfig
for _, iface := range ifaces {
@ -52,13 +57,13 @@ var queryCmd = &cobra.Command{
for id, data := range nodes.List {
jq, err := json.Marshal(data)
if err != nil {
log.Printf("%s: %+v", id, data)
fmt.Printf("%s: %+v", id, data)
} else {
jqNeighbours, err := json.Marshal(data.Neighbours)
if err != nil {
log.Printf("%s: %s neighbours: %+v", id, string(jq), data.Neighbours)
fmt.Printf("%s: %s neighbours: %+v", id, string(jq), data.Neighbours)
} else {
log.Printf("%s: %s neighbours: %s", id, string(jq), string(jqNeighbours))
fmt.Printf("%s: %s neighbours: %s", id, string(jq), string(jqNeighbours))
}
}
}
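The hunk above replaces a positional log.Printf call with structured fields from github.com/bdlm/log. A minimal, self-contained sketch of the new call style (illustrative only, not part of this commit; the address and interface values are placeholders for the command arguments):

package main

import (
	"net"

	"github.com/bdlm/log"
)

func main() {
	dstAddress := net.ParseIP("fe80::1") // placeholder for args[1]
	ifaces := []string{"eth0", "wlan0"}  // placeholder for strings.Split(args[0], ",")

	// Fields travel as key/value pairs on the entry instead of being
	// interpolated into the message string.
	log.WithFields(map[string]interface{}{
		"address": dstAddress,
		"ifaces":  ifaces,
	}).Info("sending request")
}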

View File

@ -2,14 +2,16 @@ package cmd
import (
"fmt"
"log"
"os"
"github.com/bdlm/log"
"github.com/bdlm/std/logger"
"github.com/spf13/cobra"
)
var (
timestamps bool
loglevel uint32
)
// RootCmd represents the base command when called without any subcommands
@ -35,12 +37,12 @@ func init() {
// Cobra supports persistent flags, which, if defined here,
// will be global for your application.
RootCmd.PersistentFlags().BoolVar(&timestamps, "timestamps", false, "Enables timestamps for log output")
RootCmd.PersistentFlags().Uint32Var(&loglevel, "loglevel", 40, "Show log message starting at level")
}
func initConfig() {
if timestamps {
log.SetFlags(log.Lshortfile)
} else {
log.SetFlags(log.LstdFlags | log.Lshortfile)
}
log.SetLevel(logger.Level(loglevel))
log.SetFormatter(&log.TextFormatter{
DisableTimestamp: timestamps,
})
}
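The standard library SetFlags calls above are replaced by a level filter and a text formatter. A rough stand-alone sketch of that setup (assumes the defaults of the new flags; not part of the commit):

package main

import (
	"github.com/bdlm/log"
	"github.com/bdlm/std/logger"
)

func main() {
	var loglevel uint32 = 40 // default of the --loglevel flag added above
	timestamps := false      // default of the --timestamps flag

	// Same calls as initConfig: filter entries by level, render as text.
	log.SetLevel(logger.Level(loglevel))
	log.SetFormatter(&log.TextFormatter{
		DisableTimestamp: timestamps,
	})

	log.Info("emitted if the configured level admits info messages")
	log.Debug("emitted only if the configured level admits debug messages")
}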

View File

@ -1,18 +1,19 @@
package cmd
import (
"log"
"os"
"os/signal"
"syscall"
"time"
"github.com/bdlm/log"
"github.com/spf13/cobra"
allDatabase "github.com/FreifunkBremen/yanic/database/all"
allOutput "github.com/FreifunkBremen/yanic/output/all"
"github.com/FreifunkBremen/yanic/respond"
"github.com/FreifunkBremen/yanic/runtime"
"github.com/FreifunkBremen/yanic/webserver"
"github.com/spf13/cobra"
)
// serveCmd represents the serve command
@ -25,7 +26,7 @@ var serveCmd = &cobra.Command{
err := allDatabase.Start(config.Database)
if err != nil {
panic(err)
log.Panicf("could not connect to database: %s", err)
}
defer allDatabase.Close()
@ -34,12 +35,12 @@ var serveCmd = &cobra.Command{
err = allOutput.Start(nodes, config.Nodes)
if err != nil {
panic(err)
log.Panicf("error on init outputs: %s", err)
}
defer allOutput.Close()
if config.Webserver.Enable {
log.Println("starting webserver on", config.Webserver.Bind)
log.Infof("starting webserver on %s", config.Webserver.Bind)
srv := webserver.New(config.Webserver.Bind, config.Webserver.Webroot)
go webserver.Start(srv)
defer srv.Close()
@ -50,7 +51,7 @@ var serveCmd = &cobra.Command{
if duration := config.Respondd.Synchronize.Duration; duration > 0 {
now := time.Now()
delay := duration - now.Sub(now.Truncate(duration))
log.Printf("delaying %0.1f seconds", delay.Seconds())
log.Infof("delaying %0.1f seconds", delay.Seconds())
time.Sleep(delay)
}
@ -63,7 +64,7 @@ var serveCmd = &cobra.Command{
sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
sig := <-sigs
log.Println("received", sig)
log.Infof("received %s", sig)
},
}

View File

@ -2,9 +2,10 @@ package all
import (
"fmt"
"log"
"time"
"github.com/bdlm/log"
"github.com/FreifunkBremen/yanic/database"
"github.com/FreifunkBremen/yanic/runtime"
)
@ -19,7 +20,7 @@ func Connect(allConnection map[string]interface{}) (database.Connection, error)
for dbType, conn := range database.Adapters {
configForType := allConnection[dbType]
if configForType == nil {
log.Printf("the output type '%s' has no configuration", dbType)
log.WithField("database", dbType).Infof("no configuration found")
continue
}
dbConfigs, ok := configForType.([]interface{})

View File

@ -1,11 +1,12 @@
package graphite
import (
"log"
"sync"
"github.com/FreifunkBremen/yanic/database"
"github.com/bdlm/log"
"github.com/fgrosse/graphigo"
"github.com/FreifunkBremen/yanic/database"
)
const (
@ -69,7 +70,7 @@ func (c *Connection) addWorker() {
for point := range c.points {
err := c.client.SendAll(point)
if err != nil {
log.Fatal(err)
log.WithField("database", "graphite").Fatal(err)
return
}
}

View File

@ -1,10 +1,10 @@
package influxdb
import (
"log"
"sync"
"time"
"github.com/bdlm/log"
"github.com/influxdata/influxdb1-client/models"
"github.com/influxdata/influxdb1-client/v2"
@ -100,13 +100,16 @@ func (conn *Connection) addPoint(name string, tags models.Tags, fields models.Fi
if value, ok := valueInterface.(string); ok && tags.Get([]byte(tag)) == nil {
tags.SetString(tag, value)
} else {
log.Println(name, "could not saved configured value of tag", tag)
log.WithFields(map[string]interface{}{
"name": name,
"tag": tag,
}).Warnf("count not save tag configuration on point")
}
}
}
point, err := client.NewPoint(name, tags.Map(), fields, t...)
if err != nil {
panic(err)
log.Panicf("count not save points: %s", err)
}
conn.points <- point
}
@ -156,10 +159,10 @@ func (conn *Connection) addWorker() {
// write batch now?
if bp != nil && (writeNow || closed || len(bp.Points()) >= batchMaxSize) {
log.Println("saving", len(bp.Points()), "points")
log.WithField("count", len(bp.Points())).Info("saving points")
if err = conn.client.Write(bp); err != nil {
log.Print(err)
log.Error(err)
}
writeNow = false
bp = nil

View File

@ -7,7 +7,6 @@ package logging
*/
import (
"fmt"
"log"
"os"
"time"
@ -64,6 +63,6 @@ func (conn *Connection) Close() {
}
func (conn *Connection) log(v ...interface{}) {
log.Println(v...)
fmt.Println(v...)
conn.file.WriteString(fmt.Sprintln("[", time.Now().String(), "]", v))
}

View File

@ -7,10 +7,11 @@ import (
"bufio"
"compress/flate"
"encoding/json"
"log"
"net"
"time"
"github.com/bdlm/log"
"github.com/FreifunkBremen/yanic/data"
"github.com/FreifunkBremen/yanic/database"
"github.com/FreifunkBremen/yanic/runtime"
@ -59,7 +60,7 @@ func (conn *Connection) InsertNode(node *runtime.Node) {
flater, err := flate.NewWriter(writer, flate.BestCompression)
if err != nil {
log.Printf("[database-yanic] could not create flater: %s", err)
log.Errorf("[database-yanic] could not create flater: %s", err)
return
}
defer flater.Close()
@ -69,16 +70,16 @@ func (conn *Connection) InsertNode(node *runtime.Node) {
if node.Nodeinfo != nil && node.Nodeinfo.NodeID != "" {
nodeid = node.Nodeinfo.NodeID
}
log.Printf("[database-yanic] could not encode %s node: %s", nodeid, err)
log.WithField("node_id", nodeid).Errorf("[database-yanic] could not encode node: %s", err)
return
}
err = flater.Flush()
if err != nil {
log.Printf("[database-yanic] could not compress: %s", err)
log.Errorf("[database-yanic] could not compress: %s", err)
}
err = writer.Flush()
if err != nil {
log.Printf("[database-yanic] could not send: %s", err)
log.Errorf("[database-yanic] could not send: %s", err)
}
}

main.go
View File

@ -1,7 +1,42 @@
package main
import "github.com/FreifunkBremen/yanic/cmd"
import (
"os"
"github.com/bdlm/log"
stdLogger "github.com/bdlm/std/logger"
"github.com/FreifunkBremen/yanic/cmd"
)
type Hook struct{}
func (hook *Hook) Fire(entry *log.Entry) error {
switch entry.Level {
case log.PanicLevel:
entry.Logger.Out = os.Stderr
case log.FatalLevel:
entry.Logger.Out = os.Stderr
case log.ErrorLevel:
entry.Logger.Out = os.Stderr
case log.WarnLevel:
entry.Logger.Out = os.Stdout
case log.InfoLevel:
entry.Logger.Out = os.Stdout
case log.DebugLevel:
entry.Logger.Out = os.Stdout
default:
}
return nil
}
func (hook *Hook) Levels() []stdLogger.Level {
return log.AllLevels
}
func main() {
log.AddHook(&Hook{})
cmd.Execute()
}
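The hook routes entries by severity: panic, fatal and error go to stderr, while warn, info and debug stay on stdout. For illustration only (these lines are not in the commit and assume the Hook type defined above), two calls placed after log.AddHook(&Hook{}) in main would split like this:

	log.Info("collector started")          // InfoLevel  -> Hook.Fire sets Out to os.Stdout
	log.Error("could not open connection") // ErrorLevel -> Hook.Fire sets Out to os.Stderr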

View File

@ -2,7 +2,8 @@ package all
import (
"fmt"
"log"
"github.com/bdlm/log"
"github.com/FreifunkBremen/yanic/output"
"github.com/FreifunkBremen/yanic/output/filter"
@ -23,7 +24,7 @@ func Register(configuration map[string]interface{}) (output.Output, error) {
for outputType, outputRegister := range output.Adapters {
configForOutput := allOutputs[outputType]
if configForOutput == nil {
log.Printf("the output type '%s' has no configuration\n", outputType)
log.WithField("output", outputType).Infof("no configuration found")
continue
}
outputConfigs, ok := configForOutput.([]interface{})

View File

@ -3,8 +3,10 @@ package filter
import (
"fmt"
"github.com/FreifunkBremen/yanic/runtime"
"github.com/bdlm/log"
"github.com/pkg/errors"
"github.com/FreifunkBremen/yanic/runtime"
)
// factory function for building a filter
@ -24,7 +26,7 @@ var filters = make(map[string]factory)
// Register registers a new filter
func Register(name string, f factory) {
if _, ok := filters[name]; ok {
panic("already registered: " + name)
log.WithField("filter", name).Panic("filter already registered")
}
filters[name] = f
}
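Swapping panic for log.Panic keeps the aborting behaviour: the entry is written with its field and the call still panics, so registering a duplicate filter name aborts as before. A small hedged sketch of that property (not commit code; the filter name is just an example):

package main

import (
	"github.com/bdlm/log"
)

func main() {
	defer func() {
		if r := recover(); r != nil {
			log.Infof("recovered from: %v", r) // shows that Panic really panicked
		}
	}()

	// Writes the entry with the filter field, then panics.
	log.WithField("filter", "blacklist").Panic("filter already registered")
}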

View File

@ -2,9 +2,10 @@ package meshviewerFFRGB
import (
"fmt"
"log"
"strings"
"github.com/bdlm/log"
"github.com/FreifunkBremen/yanic/lib/jsontime"
"github.com/FreifunkBremen/yanic/runtime"
)
@ -81,7 +82,13 @@ func transform(nodes *runtime.Nodes) *Meshviewer {
if link.Type == LINK_TYPE_FALLBACK {
link.Type = linkType
} else {
log.Printf("different linktypes for '%s' - '%s' prev: '%s' new: '%s' source: '%s' target: '%s'", linkOrigin.SourceAddress, linkOrigin.TargetAddress, link.Type, linkType, typeList[linkOrigin.SourceAddress], typeList[linkOrigin.TargetAddress])
log.WithFields(map[string]interface{}{
"link": fmt.Sprintf("%s-%s", linkOrigin.SourceAddress, linkOrigin.TargetAddress),
"prev": link.Type,
"new": linkType,
"source": typeList[linkOrigin.SourceAddress],
"target": typeList[linkOrigin.TargetAddress],
}).Warn("different linktypes")
}
}

View File

@ -2,7 +2,8 @@ package meshviewer
import (
"fmt"
"log"
"github.com/bdlm/log"
"github.com/FreifunkBremen/yanic/output"
"github.com/FreifunkBremen/yanic/runtime"

View File

@ -5,10 +5,11 @@ import (
"compress/flate"
"encoding/json"
"fmt"
"log"
"net"
"time"
"github.com/bdlm/log"
"github.com/FreifunkBremen/yanic/data"
"github.com/FreifunkBremen/yanic/database"
"github.com/FreifunkBremen/yanic/lib/jsontime"
@ -67,7 +68,7 @@ func (coll *Collector) listenUDP(iface InterfaceConfig) {
} else {
addr, err = getUnicastAddr(iface.InterfaceName, iface.MulticastAddress == "")
if err != nil {
log.Panic(err)
log.WithField("iface", iface.InterfaceName).Panic(err)
}
}
@ -122,16 +123,16 @@ func getUnicastAddr(ifname string, linklocal bool) (net.IP, error) {
if ip != nil {
return ip, nil
}
return nil, fmt.Errorf("unable to find a unicast address for %s", ifname)
return nil, fmt.Errorf("unable to find a unicast address")
}
// Start Collector
func (coll *Collector) Start(interval time.Duration) {
if coll.interval != 0 {
panic("already started")
log.Panic("already started")
}
if interval <= 0 {
panic("invalid collector interval")
log.Panic("invalid collector interval")
}
coll.interval = interval
@ -160,7 +161,7 @@ func (coll *Collector) sendOnce() {
}
func (coll *Collector) sendMulticast() {
log.Println("sending multicasts")
log.Info("sending multicasts")
for _, conn := range coll.connections {
if conn.SendRequest {
coll.sendPacket(conn.Conn, conn.MulticastAddress)
@ -189,13 +190,16 @@ func (coll *Collector) sendUnicasts(seenBefore jsontime.Time) {
send++
}
if send == 0 {
log.Printf("unable to find connection for %s", node.Address.Zone)
log.WithField("iface", node.Address.Zone).Error("unable to find connection")
} else {
time.Sleep(10 * time.Millisecond)
count += send
}
}
log.Printf("sending %d unicast pkg for %d nodes", count, len(nodes))
log.WithFields(map[string]interface{}{
"pkg_count": count,
"nodes_count": len(nodes),
}).Info("sending unicast pkg")
}
// SendPacket sends a UDP request to the given unicast or multicast address on the first UDP socket
@ -212,7 +216,7 @@ func (coll *Collector) sendPacket(conn *net.UDPConn, destination net.IP) {
}
if _, err := conn.WriteToUDP([]byte("GET nodeinfo statistics neighbours"), &addr); err != nil {
log.Println("WriteToUDP failed:", err)
log.WithField("address", addr.String()).Errorf("WriteToUDP failed: %s", err)
}
}
@ -234,7 +238,7 @@ func (coll *Collector) sender() {
func (coll *Collector) parser() {
for obj := range coll.queue {
if data, err := obj.parse(); err != nil {
log.Println("unable to decode response from", obj.Address.String(), err)
log.WithField("address", obj.Address.String()).Errorf("unable to decode response %s", err)
} else {
coll.saveResponse(obj.Address, data)
}
@ -266,7 +270,10 @@ func (coll *Collector) saveResponse(addr *net.UDPAddr, res *data.ResponseData) {
// Check length of nodeID
if len(nodeID) != 12 {
log.Printf("invalid NodeID '%s' from %s", nodeID, addr.String())
log.WithFields(map[string]interface{}{
"node_id": nodeID,
"address": addr.String(),
}).Warn("invalid NodeID")
return
}
@ -306,7 +313,14 @@ func (coll *Collector) receiver(conn *net.UDPConn) {
n, src, err := conn.ReadFromUDP(buf)
if err != nil {
log.Println("ReadFromUDP failed:", err)
if conn != nil {
log.WithFields(map[string]interface{}{
"local": conn.LocalAddr(),
"remote": conn.RemoteAddr(),
}).Errorf("ReadFromUDP failed: %s", err)
} else {
log.Errorf("ReadFromUDP failed: %s", err)
}
return
}

View File

@ -9,6 +9,8 @@ import (
"strconv"
"strings"
"time"
"github.com/bdlm/log"
)
var linePattern = regexp.MustCompile("^<!-- ....-..-.. ..:..:.. [A-Z]+ / (\\d+) --> <row><v>([^<]+)</v><v>([^<]+)</v></row>")
@ -27,10 +29,10 @@ func Read(rrdFile string) chan Dataset {
stdout, err := cmd.StdoutPipe()
if err != nil {
panic(err)
log.Panicf("error on get stdout: %s", err)
}
if err := cmd.Start(); err != nil {
panic(err)
log.Panicf("error on start rrdtool: %s", err)
}
r := bufio.NewReader(stdout)

View File

@ -2,11 +2,12 @@ package runtime
import (
"encoding/json"
"log"
"os"
"sync"
"time"
"github.com/bdlm/log"
"github.com/FreifunkBremen/yanic/data"
"github.com/FreifunkBremen/yanic/lib/jsontime"
)
@ -186,7 +187,7 @@ func (nodes *Nodes) readIfaces(nodeinfo *data.NodeInfo) {
network := nodeinfo.Network
if nodeID == "" {
log.Println("nodeID missing in nodeinfo")
log.Warn("nodeID missing in nodeinfo")
return
}
@ -202,7 +203,7 @@ func (nodes *Nodes) readIfaces(nodeinfo *data.NodeInfo) {
}
if oldNodeID, _ := nodes.ifaceToNodeID[addr]; oldNodeID != nodeID {
if oldNodeID != "" {
log.Printf("override nodeID from %s to %s on MAC address %s", oldNodeID, nodeID, addr)
log.Warnf("override nodeID from %s to %s on MAC address %s", oldNodeID, nodeID, addr)
}
nodes.ifaceToNodeID[addr] = nodeID
}
@ -214,7 +215,7 @@ func (nodes *Nodes) load() {
if f, err := os.Open(path); err == nil { // transform data to legacy meshviewer
if err = json.NewDecoder(f).Decode(nodes); err == nil {
log.Println("loaded", len(nodes.List), "nodes")
log.Infof("loaded %d nodes", len(nodes.List))
nodes.Lock()
for _, node := range nodes.List {
@ -225,10 +226,10 @@ func (nodes *Nodes) load() {
nodes.Unlock()
} else {
log.Println("failed to unmarshal nodes:", err)
log.Errorf("failed to unmarshal nodes: %s", err)
}
} else {
log.Println("failed to load cached nodes:", err)
log.Errorf("failed to load cached nodes: %s", err)
}
}

View File

@ -70,13 +70,15 @@ func TestLoadAndSave(t *testing.T) {
nodes.save()
os.Remove(tmpfile.Name())
assert.PanicsWithValue("open /dev/null.tmp: permission denied", func() {
assert.Panics(func() {
SaveJSON(nodes, "/dev/null")
// "open /dev/null.tmp: permission denied",
})
tmpfile, _ = ioutil.TempFile("/tmp", "nodes")
assert.PanicsWithValue("json: unsupported type: func() string", func() {
assert.Panics(func() {
SaveJSON(tmpfile.Name, tmpfile.Name())
// "json: unsupported type: func() string",
})
os.Remove(tmpfile.Name())

View File

@ -4,6 +4,7 @@ import (
"net/http"
"github.com/NYTimes/gziphandler"
"github.com/bdlm/log"
)
// New creates a new webserver and starts it
@ -17,6 +18,6 @@ func New(bindAddr, webroot string) *http.Server {
func Start(srv *http.Server) {
// service connections
if err := srv.ListenAndServe(); err != http.ErrServerClosed {
panic(err)
log.Panicf("webserver crashed: %s", err)
}
}