Initial commit
This commit is contained in:
BIN
Conventional-Sites.xlsx
Normal file
BIN
Conventional-Sites.xlsx
Normal file
Binary file not shown.
66
uem-events.py
Executable file
66
uem-events.py
Executable file
@@ -0,0 +1,66 @@
|
|||||||
|
#!/usr/bin/python3
|
||||||
|
import csv
import os
from datetime import datetime
from pathlib import Path

import pandas as pd
from influxdb_client import InfluxDBClient, Point, WritePrecision
from influxdb_client.client.write_api import SYNCHRONOUS
|
||||||
|
|
||||||
|
# Configuration.
# NOTE(review): the token below is a live credential committed to source
# control — it should be rotated. Each setting can now be overridden via an
# environment variable; the hard-coded literals remain the defaults, so
# behavior is unchanged when no variables are set.
token = os.environ.get(
    "INFLUXDB_TOKEN",
    "-qVexb9yCtsunJlznN9Ypv51xXLJixj-5_hcJNhJejIh0W3OW4yJ-FTxWyKl4g9ENeUIqqrBBrmHaF2oqh0PFA==",
)
org = os.environ.get("INFLUXDB_ORG", "MotWA")
bucket = os.environ.get("INFLUXDB_BUCKET", "UEM-Events")
url = os.environ.get("INFLUXDB_URL", "http://192.168.254.201:8086")
directory_path = os.environ.get("UEM_DIR", '/home/bxn478@ds.mot.com/UEM')

# Shared InfluxDB client; SYNCHRONOUS means each write blocks until acked.
client = InfluxDBClient(url=url, token=token, org=org)
write_api = client.write_api(write_options=SYNCHRONOUS)

# Every CSV event export under the UEM directory (searched recursively).
csv_files = list(Path(directory_path).rglob('*.csv'))

# Lookup table: numeric Site ID -> human-readable Site Name, taken from the
# reference workbook in the working directory.
df = pd.read_excel('Conventional-Sites.xlsx')
channels = dict(zip(df['Site ID'], df['Site Name']))
|
||||||
|
|
||||||
|
def upload_events(file_path):
    """Parse one UEM event CSV and push site-link status points to InfluxDB.

    Only rows whose ``Entity`` starts with "ZC Conventional Site Link" and
    ends with "1" are considered. A ``Message`` starting with "UP" records
    status 1, "DOWN" records 0; anything else is skipped. Each accepted row
    becomes a "channel_status" point tagged with the site's channel name.

    Errors in a single row are printed and skipped so one bad record does
    not abort the rest of the file; a CSV-level format error aborts only
    the current file.
    """
    with open(file_path, mode='r', encoding='utf-8', errors='replace') as file:
        reader = csv.DictReader(file)
        print(file.name)
        try:
            for row in reader:
                try:
                    if row['Entity'].startswith("ZC Conventional Site Link") and row['Entity'].endswith("1"):
                        # Determine status value: 1 for restored (UP), 0 for DOWN
                        if row['Message'].startswith("UP"):
                            status_val = 1
                        elif row['Message'].startswith("DOWN"):
                            status_val = 0
                        else:
                            status_val = None

                        if status_val is not None:
                            # Timestamps look like "Mar 16, 2026 00:00:32 +0800"
                            # (timezone-aware thanks to %z).
                            dt = datetime.strptime(row['Date/Time'], '%b %d, %Y %H:%M:%S %z')

                            # Entity ends "...<4-digit site id> ... 1"; slice
                            # out the numeric site id and map it to a channel
                            # name (per-row except handles unknown ids).
                            siteID = row['Entity'][-6:-2]
                            channel = channels[int(siteID)]

                            point = Point("channel_status") \
                                .tag("channel", channel) \
                                .field("status", status_val) \
                                .time(dt, WritePrecision.S)

                            write_api.write(bucket=bucket, org=org, record=point)
                except Exception as e:
                    # Best-effort ingest: log and continue with the next row.
                    print(f"Error processing a specific row: {e}")
        except csv.Error as e:
            print(f"CSV format error at line {reader.line_num}: {e}")
        # Fix: removed the redundant explicit file.close() — the with-block
        # already closes the file on exit.
|
||||||
|
|
||||||
|
# Ingest every discovered event export, then release the InfluxDB connection.
for csv_path in csv_files:
    upload_events(csv_path)

client.close()
|
||||||
Reference in New Issue
Block a user