Skip to content

Commit 178a3d9

Browse files
committed
Update code and dependencies to support python3.10 to python3.11
1 parent 1ba5a6c commit 178a3d9

25 files changed

Lines changed: 636 additions & 544 deletions

.github/workflows/check.yml

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
name: Check-Build-Publish

on:
  push:
    branches: ['*']
  pull_request:
    branches:
      - master
  schedule:
    # run every Monday at 6am
    - cron: '0 6 * * 1'

jobs:
  tests:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12"]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: "Set up Python ${{ matrix.python-version }}"
        uses: actions/setup-python@v4
        with:
          python-version: "${{ matrix.python-version }}"
      # Only install here; running the suite belongs to the next step.
      # The original also invoked `tox` in this step, which ran the full
      # test matrix a second time before `make check` (which wraps tox).
      - name: Install dependencies
        run: python -m pip install --upgrade pip tox
      - name: "Run tox for ${{ matrix.python-version }}"
        run: make check

logshipper/cmd.py

Lines changed: 30 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
import time
2323

2424
import eventlet
25+
2526
eventlet.monkey_patch()
2627

2728
import logshipper.pipeline
@@ -32,29 +33,28 @@
3233

3334
def main():
3435
global LOG, ARGS
35-
parser = argparse.ArgumentParser(
36-
description="Processes log messages and sends them elsewhere")
36+
parser = argparse.ArgumentParser(description="Processes log messages and sends them elsewhere")
3737

38-
parser.add_argument('pipeline', nargs='+',
39-
help='Where to find pipelines (*.yml files)')
38+
parser.add_argument("pipeline", nargs="+", help="Where to find pipelines (*.yml files)")
4039

41-
parser.add_argument('--verbose', action='store_true')
42-
parser.add_argument('--debug', action='store_true')
40+
parser.add_argument("--verbose", action="store_true")
41+
parser.add_argument("--debug", action="store_true")
4342

4443
ARGS = parser.parse_args()
4544

4645
if ARGS.debug:
47-
log_level = 'DEBUG'
46+
log_level = "DEBUG"
4847
elif ARGS.verbose:
49-
log_level = 'INFO'
48+
log_level = "INFO"
5049
else:
51-
log_level = 'WARNING'
50+
log_level = "WARNING"
5251

5352
logging.basicConfig(level=log_level)
5453
LOG = logging.getLogger(__name__)
5554

5655
pipeline_manager = logshipper.pipeline.PipelineManager(
57-
[os.path.abspath(p) for p in ARGS.pipeline])
56+
[os.path.abspath(p) for p in ARGS.pipeline]
57+
)
5858

5959
pipeline_manager.start()
6060

@@ -67,49 +67,50 @@ def main():
6767

6868
def ship_file():
6969
global LOG, ARGS
70-
parser = argparse.ArgumentParser(
71-
description="Processes log messages and sends them elsewhere")
70+
parser = argparse.ArgumentParser(description="Processes log messages and sends them elsewhere")
7271

73-
parser.add_argument('--pipeline', required=True)
72+
parser.add_argument("--pipeline", required=True)
7473

75-
parser.add_argument('--pipeline-path', action='append',
76-
help='Where to find pipelines (*.yml files)')
74+
parser.add_argument(
75+
"--pipeline-path", action="append", help="Where to find pipelines (*.yml files)"
76+
)
7777

78-
parser.add_argument('file', nargs='+', help='File to ship')
78+
parser.add_argument("file", nargs="+", help="File to ship")
7979

80-
parser.add_argument('--verbose', action='store_true')
81-
parser.add_argument('--debug', action='store_true')
80+
parser.add_argument("--verbose", action="store_true")
81+
parser.add_argument("--debug", action="store_true")
8282

8383
ARGS = parser.parse_args()
8484

8585
if ARGS.debug:
86-
log_level = 'DEBUG'
86+
log_level = "DEBUG"
8787
elif ARGS.verbose:
88-
log_level = 'INFO'
88+
log_level = "INFO"
8989
else:
90-
log_level = 'WARNING'
90+
log_level = "WARNING"
9191

9292
logging.basicConfig(level=log_level)
9393
LOG = logging.getLogger(__name__)
9494

9595
pipeline_manager = logshipper.pipeline.PipelineManager(
96-
[os.path.abspath(p) for p in ARGS.pipeline_path])
96+
[os.path.abspath(p) for p in ARGS.pipeline_path]
97+
)
9798

9899
pipeline_manager.load_pipelines()
99100

100101
for filename in ARGS.file:
101102
# Peek to detect gzip
102-
with open(filename, 'rb') as f:
103+
with open(filename, "rb") as f:
103104
header = f.read(16)
104105

105106
if header[0:2] == b"\037\213":
106-
print("Processing gzipped file %s" % filename)
107+
print(("Processing gzipped file %s" % filename))
107108
file_handle = gzip.open(filename)
108-
elif header[0:3] == b"\x42\x5A\x68":
109-
print("Processing bz2'ed file %s" % filename)
109+
elif header[0:3] == b"\x42\x5a\x68":
110+
print(("Processing bz2'ed file %s" % filename))
110111
file_handle = bz2.BZ2File(filename)
111112
else:
112-
print("Processing uncompressed file %s" % filename)
113+
print(("Processing uncompressed file %s" % filename))
113114
file_handle = open(filename)
114115

115116
with file_handle:
@@ -120,8 +121,7 @@ def ship_file():
120121
lines += 1
121122
line = line.rstrip("\r\n")
122123
try:
123-
pipeline_manager.process({'message': line},
124-
ARGS.pipeline)
124+
pipeline_manager.process({"message": line}, ARGS.pipeline)
125125
except KeyboardInterrupt:
126126
break
127127
except Exception:
@@ -131,8 +131,7 @@ def ship_file():
131131

132132
if (time.time() - treport) > 1:
133133
rate = lines / (time.time() - start_time)
134-
sys.stdout.write(
135-
"Processing %i lines, %4.1f/s\r" % (lines, rate))
134+
sys.stdout.write("Processing %i lines, %4.1f/s\r" % (lines, rate))
136135
treport = time.time()
137136
sys.stdout.flush()
138137

logshipper/context.py

Lines changed: 16 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,11 @@
2020

2121

2222
def _template_code(template):
23-
if (template in (True, False, None) or
24-
isinstance(template, six.integer_types) or
25-
isinstance(template, float)):
23+
if (
24+
template in (True, False, None)
25+
or isinstance(template, six.integer_types)
26+
or isinstance(template, float)
27+
):
2628
return {}, repr(template)
2729

2830
if isinstance(template, six.string_types):
@@ -44,7 +46,7 @@ def _template_code(template):
4446
result = ["{"]
4547
namespace = {}
4648

47-
for key, value in template.items():
49+
for key, value in list(template.items()):
4850
sub_namespace, value_code = _template_code(value)
4951
result.append("%r:%s," % (key, value_code))
5052
namespace.update(sub_namespace)
@@ -83,44 +85,41 @@ def _template_code_string(template):
8385

8486
if conversion:
8587
namespace["fmt"] = fmt
86-
value_code = "fmt.convert_field(%s, %r)" % (value_code,
87-
conversion)
88+
value_code = "fmt.convert_field(%s, %r)" % (value_code, conversion)
8889

8990
if not format_spec:
9091
result.append("str(%s)" % value_code)
91-
elif '{' in format_spec:
92+
elif "{" in format_spec:
9293
namespace["fmt"] = fmt
93-
result.append("format(%s, fmt.vformat(%r, args, kwargs))" %
94-
(value_code, format_spec))
94+
result.append(
95+
"format(%s, fmt.vformat(%r, args, kwargs))" % (value_code, format_spec)
96+
)
9597
else:
9698
result.append("format(%s, %r)\n" % (value_code, format_spec))
9799

98100
if not result:
99-
return namespace, "\"\""
101+
return namespace, '""'
100102
if len(result) == 1:
101103
return namespace, result[0]
102104

103-
return namespace, "\"\".join([%s])" % (", ".join(result))
105+
return namespace, '"".join([%s])' % (", ".join(result))
104106

105107

106108
def prepare_template(template):
107109
namespace, code = _template_code(template)
108110

109-
result = ("def template(args, kwargs):\n"
110-
" return %s" % code)
111+
result = "def template(args, kwargs):\n" " return %s" % code
111112

112113
six.exec_(result, namespace)
113114

114115
func = namespace["template"]
115-
func.interpolate = lambda context: func(context.backreferences,
116-
context.message)
116+
func.interpolate = lambda context: func(context.backreferences, context.message)
117117

118118
return func
119119

120120

121121
class Context(object):
122-
__slots__ = ['pipeline_manager', 'message', 'match', 'match_field',
123-
'backreferences', 'matches']
122+
__slots__ = ["pipeline_manager", "message", "match", "match_field", "backreferences", "matches"]
124123

125124
def __init__(self, message, pipeline_manager):
126125
self.pipeline_manager = pipeline_manager

logshipper/elasticsearch.py

Lines changed: 21 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -29,12 +29,12 @@ def json_default(value):
2929
else:
3030
return str(value)
3131

32+
3233
TRUE_VALUES = set([True, 1, "yes", "true", "on"])
3334

3435

3536
def to_bool(value):
36-
return (value in TRUE_VALUES or
37-
str(value).lower() in TRUE_VALUES)
37+
return value in TRUE_VALUES or str(value).lower() in TRUE_VALUES
3838

3939

4040
def md5_hash(data):
@@ -65,31 +65,32 @@ def prepare_elasticsearch_http(parameters):
6565
```http://localhost:9200/```.
6666
"""
6767

68-
index = parameters.get('index', 'logshipper-{timestamp:%Y.%m.%d}')
68+
index = parameters.get("index", "logshipper-{timestamp:%Y.%m.%d}")
6969
index = logshipper.context.prepare_template(index)
7070

71-
if 'id' in parameters:
72-
id_ = logshipper.context.prepare_template(parameters['id']).interpolate
71+
if "id" in parameters:
72+
id_ = logshipper.context.prepare_template(parameters["id"]).interpolate
7373
else:
7474
id_ = None
7575

76-
doctype = parameters.get('doctype', 'log')
77-
timestamp_field = parameters.get('timestamp', '@timestamp')
76+
doctype = parameters.get("doctype", "log")
77+
timestamp_field = parameters.get("timestamp", "@timestamp")
78+
79+
if "document" in parameters:
80+
document_template = logshipper.context.prepare_template(parameters["document"]).interpolate
81+
elif timestamp_field != "timestamp":
7882

79-
if 'document' in parameters:
80-
document_template = logshipper.context.prepare_template(
81-
parameters['document']).interpolate
82-
elif timestamp_field != 'timestamp':
8383
def document_template(context):
8484
result = dict(context.message)
8585
result[timestamp_field] = result.pop("timestamp")
8686
return result
87+
8788
else:
8889
document_template = lambda context: dict(context.message)
8990

90-
sort_keys = to_bool(parameters.get('sort_keys', False))
91+
sort_keys = to_bool(parameters.get("sort_keys", False))
9192

92-
base_url = parameters.get('url', "http://localhost:9200/")
93+
base_url = parameters.get("url", "http://localhost:9200/")
9394
if not base_url.endswith("/"):
9495
base_url += "/"
9596

@@ -98,11 +99,14 @@ def document_template(context):
9899
def handle_elasticsearch_http(message, context):
99100
document = document_template(context)
100101

101-
document = json.dumps(document, default=json_default,
102-
sort_keys=sort_keys).encode('utf8')
102+
document = json.dumps(document, default=json_default, sort_keys=sort_keys).encode("utf8")
103103

104-
url = "%s%s/%s/%s" % (base_url, index.interpolate(context), doctype,
105-
id_(context) if id_ else md5_hash(document))
104+
url = "%s%s/%s/%s" % (
105+
base_url,
106+
index.interpolate(context),
107+
doctype,
108+
id_(context) if id_ else md5_hash(document),
109+
)
106110

107111
result = session.put(url, data=document)
108112
result.raise_for_status()

0 commit comments

Comments
 (0)