Blacken python scripts
parent 0984845eab
commit 07bfb40425

.pre-commit-config.yaml (new file, 6 lines added)
@@ -0,0 +1,6 @@
+repos:
+  - repo: https://github.com/psf/black
+    rev: stable
+    hooks:
+      - id: black
+        language_version: python3
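With this hook in place, the reformatting in the hunks below is what a run of Black through pre-commit would typically produce (for example `pre-commit install` once, then `pre-commit run --all-files`); the exact invocation is an assumption and not part of this commit.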
@@ -2,31 +2,54 @@
 # made for samsung galaxy note 4
 # only works with stylus
 # open phone app, dial *#0*#, choose “Black”
+# requirements:
+# * python-libxdo
 import argparse
 import subprocess
 from xdo import Xdo
 
-parser = argparse.ArgumentParser(description='Use android phone stylus as input method')
-parser.add_argument('--pos', type=str, default='100x100+0x0', help='Position on phone to use in percent (e.g. 40x40+10x10)')
-parser.add_argument('--screen', type=str, default='1920x1080+0x0', help='Screen resolution and offset')
-parser.add_argument('--phone', type=str, default='12544x7056', help='Phone resolution (not necessarily equal to the phone’s screen resolution)')
+parser = argparse.ArgumentParser(description="Use android phone stylus as input method")
+parser.add_argument(
+    "--pos",
+    type=str,
+    default="100x100+0x0",
+    help="Position on phone to use in percent (e.g. 40x40+10x10)",
+)
+parser.add_argument(
+    "--screen", type=str, default="1920x1080+0x0", help="Screen resolution and offset"
+)
+parser.add_argument(
+    "--phone",
+    type=str,
+    default="12544x7056",
+    help="Phone resolution (not necessarily equal to the phone’s screen resolution)",
+)
 args = parser.parse_args()
 
-screen_res = tuple(map(int, args.screen.split('+')[0].split('x')))
-offset = tuple(map(int, args.screen.split('+')[1].split('x')))
-phone_res = tuple(map(int, args.phone.split('x')))
+screen_res = tuple(map(int, args.screen.split("+")[0].split("x")))
+offset = tuple(map(int, args.screen.split("+")[1].split("x")))
+phone_res = tuple(map(int, args.phone.split("x")))
 
 limits = [[0, 0], [0, 0]]
-limits[0][0] = int(args.pos.split('+')[1].split('x')[0]) / 100 * phone_res[0]
-limits[0][1] = limits[0][0] + int(args.pos.split('+')[0].split('x')[0]) / 100 * phone_res[0]
-limits[1][0] = int(args.pos.split('+')[1].split('x')[1]) / 100 * phone_res[1]
-limits[1][1] = limits[1][0] + int(args.pos.split('+')[0].split('x')[1]) / 100 * phone_res[1]
+limits[0][0] = int(args.pos.split("+")[1].split("x")[0]) / 100 * phone_res[0]
+limits[0][1] = (
+    limits[0][0] + int(args.pos.split("+")[0].split("x")[0]) / 100 * phone_res[0]
+)
+limits[1][0] = int(args.pos.split("+")[1].split("x")[1]) / 100 * phone_res[1]
+limits[1][1] = (
+    limits[1][0] + int(args.pos.split("+")[0].split("x")[1]) / 100 * phone_res[1]
+)
+
 
 def real_value(axis, value):
     if axis == 1:
         value = phone_res[1] - value
     # https://stackoverflow.com/a/929107
-    return int(((value - limits[axis][0]) * screen_res[axis]) / (limits[axis][1] - limits[axis][0]))
+    return int(
+        ((value - limits[axis][0]) * screen_res[axis])
+        / (limits[axis][1] - limits[axis][0])
+    )
+
 
 update = 0
 x = 0
@@ -35,17 +58,19 @@ pressure = 0
 
 xdo = Xdo()
 
-process = subprocess.Popen(['adb', 'shell', 'getevent', '-q', '-l'], stdout=subprocess.PIPE)
+process = subprocess.Popen(
+    ["adb", "shell", "getevent", "-q", "-l"], stdout=subprocess.PIPE
+)
 
 for line in process.stdout:
-    line = line.decode('utf-8')
+    line = line.decode("utf-8")
     line = line.split()
-    if line[1] != 'EV_ABS':
+    if line[1] != "EV_ABS":
         continue
     event = line[2]
-    value = int('0x' + line[3], 16)
+    value = int("0x" + line[3], 16)
 
-    if event == 'ABS_PRESSURE':
+    if event == "ABS_PRESSURE":
         if value == 0 and pressure != 0:
             xdo.mouse_up(0, 1)
         elif pressure == 0:
@@ -53,13 +78,13 @@ for line in process.stdout:
         pressure = value
 
     # Y and X flipped (landscape)
-    elif event == 'ABS_Y':
+    elif event == "ABS_Y":
         old_x = x
         x = real_value(0, value)
         if old_x != x:
            update += 1
 
-    elif event == 'ABS_X':
+    elif event == "ABS_X":
         old_y = y
         y = real_value(1, value)
         if old_y != y:
@@ -6,29 +6,38 @@ import re
 import requests
 import sys
 
+
 def parse_filename(filename):
-    split = re.split(':|,|\+|>', filename)
+    split = re.split(":|,|\+|>", filename)
     return map(int, split[1:])
 
+
 def get_image_urls():
     url = sys.argv[1]
-    soup = BeautifulSoup(requests.get(url).text, 'html.parser')
-    for div in soup.find(id='content').find_all('div'):
-        yield url + div.get('data-ptimg')
+    soup = BeautifulSoup(requests.get(url).text, "html.parser")
+    for div in soup.find(id="content").find_all("div"):
+        yield url + div.get("data-ptimg")
+
 
 pages = list(get_image_urls())
 
 for metadata_url in tqdm(pages):
-    image_url = re.sub('\.ptimg\.json$', '.jpg', metadata_url)
+    image_url = re.sub("\.ptimg\.json$", ".jpg", metadata_url)
 
     ptimg_data = requests.get(metadata_url).json()
     image_data = requests.get(image_url, stream=True).raw
 
     scrambled_image = Image.open(image_data)
-    combined_image = Image.new('RGB', (ptimg_data['views'][0]['width'], ptimg_data['views'][0]['height']))
+    combined_image = Image.new(
+        "RGB", (ptimg_data["views"][0]["width"], ptimg_data["views"][0]["height"])
+    )
 
-    for from_x, from_y, width, height, to_x, to_y in map(parse_filename, ptimg_data['views'][0]['coords']):
-        chunk_data = scrambled_image.crop((from_x, from_y, from_x+width, from_y+height))
+    for from_x, from_y, width, height, to_x, to_y in map(
+        parse_filename, ptimg_data["views"][0]["coords"]
+    ):
+        chunk_data = scrambled_image.crop(
+            (from_x, from_y, from_x + width, from_y + height)
+        )
         combined_image.paste(chunk_data, (to_x, to_y))
 
-    combined_image.save(image_url.split('/')[-1])
+    combined_image.save(image_url.split("/")[-1])
@@ -7,31 +7,35 @@ import sys
 import os
 
 if len(sys.argv) < 2:
-    raise Exception('sys.argv[1]')
+    raise Exception("sys.argv[1]")
+
 
 def parse_chapter_api(data):
-    if not data['server'].startswith('http'):
-        data['server'] = 'https://mangadex.org/data/'
-    base_url = data['server'] + data['hash'] + '/'
-    return list(map(lambda page: base_url + page, data['page_array']))
+    if not data["server"].startswith("http"):
+        data["server"] = "https://mangadex.org/data/"
+    base_url = data["server"] + data["hash"] + "/"
+    return list(map(lambda page: base_url + page, data["page_array"]))
+
 
 for chapter in tqdm(sys.argv[1:]):
     chapter_api_response = requests.get(
-        f'https://mangadex.org/api/?id={chapter}&type=chapter'
+        f"https://mangadex.org/api/?id={chapter}&type=chapter"
     )
 
-    chapter_number = chapter_api_response.json()['chapter']
+    chapter_number = chapter_api_response.json()["chapter"]
 
    os.makedirs(chapter_number, exist_ok=True)
 
    try:
        for image in tqdm(parse_chapter_api(chapter_api_response.json())):
            r = requests.get(image, stream=True)
-            with open(chapter_number + '/' + image.split('/')[-1], 'wb') as f:
+            with open(chapter_number + "/" + image.split("/")[-1], "wb") as f:
                r.raw.decode_content = True
                shutil.copyfileobj(r.raw, f)
    except simplejson.errors.JSONDecodeError as e:
        if chapter_api_response.status_code != 200:
-            raise Exception(f'API request failed with HTTP status code {chapter_api_response.status_code}') from None
+            raise Exception(
+                f"API request failed with HTTP status code {chapter_api_response.status_code}"
+            ) from None
        else:
            raise e
@@ -9,46 +9,45 @@ import sys
 
 XOR_KEY = 101
 
+
 def get_pages(chapter):
     api_response = requests.get(
-        'https://api.mangarockhd.com/query/web401/pagesv2',
-        params={
-            'oid': 'mrs-chapter-' + chapter
-        }
+        "https://api.mangarockhd.com/query/web401/pagesv2",
+        params={"oid": "mrs-chapter-" + chapter},
     ).json()
-    for item in api_response['data']:
-        yield item['url']
+    for item in api_response["data"]:
+        yield item["url"]
 
+
 def decode_ciphertext(byte):
     return byte ^ XOR_KEY
 
+
 def get_image(url):
     ciphertext = requests.get(url).content
     size = len(ciphertext) + 7
     cleartext = BytesIO()
-    cleartext.write('RIFF'.encode('ascii'))
-    cleartext.write(bytes([
-        size >> 0 & 255,
-        size >> 8 & 255,
-        size >> 16 & 255,
-        size >> 24 & 255
-    ]))
-    cleartext.write('WEBPVP8'.encode('ascii'))
+    cleartext.write("RIFF".encode("ascii"))
+    cleartext.write(
+        bytes([size >> 0 & 255, size >> 8 & 255, size >> 16 & 255, size >> 24 & 255])
+    )
+    cleartext.write("WEBPVP8".encode("ascii"))
     cleartext.write(bytes(list(map(decode_ciphertext, ciphertext))))
     cleartext.seek(0)
     return cleartext
 
+
 requested_chapters = sys.argv[1:]
 
 for chapter_idx, chapter in tqdm(list(enumerate(requested_chapters))):
-    chapter_dir = str(chapter_idx+1)
+    chapter_dir = str(chapter_idx + 1)
     os.makedirs(chapter_dir, exist_ok=True)
     pages = get_pages(chapter)
     for idx, page in tqdm(list(enumerate(pages))):
-        filename = os.path.join(chapter_dir, f'{idx+1:04}.webp')
+        filename = os.path.join(chapter_dir, f"{idx+1:04}.webp")
         if os.path.isfile(filename):
             continue
 
         image = get_image(page)
-        with open(filename, 'wb') as f:
+        with open(filename, "wb") as f:
             f.write(image.read())
@@ -3,20 +3,22 @@ import sys
 import subprocess
 import json
 
+
 def is_number(value):
-    numeric_chars = [*map(str, range(10)), '-']
+    numeric_chars = [*map(str, range(10)), "-"]
     numeric_charcodes = list(map(ord, numeric_chars))
 
     return all(ord(char) in numeric_charcodes for char in value)
 
+
 def parse_encoder_params(encoder_params):
-    for param in encoder_params.split(' / '):
-        if '=' in param:
-            key, value = param.split('=', 1)
+    for param in encoder_params.split(" / "):
+        if "=" in param:
+            key, value = param.split("=", 1)
 
             if is_number(value):
                 value = int(value)
-            elif is_number(value.replace('.', '', 1)):
+            elif is_number(value.replace(".", "", 1)):
                 value = float(value)
         else:
             key = param
@@ -24,23 +26,21 @@ def parse_encoder_params(encoder_params):
 
         yield key, value
 
+
 def run_mediainfo(file):
     cmd = [
-        'mediainfo',
-        '--Inform=Video;%Encoded_Library%\\n%Encoded_Library_Settings%',
-        file
+        "mediainfo",
+        "--Inform=Video;%Encoded_Library%\\n%Encoded_Library_Settings%",
+        file,
     ]
     process = subprocess.run(cmd, stdout=subprocess.PIPE)
-    output = process.stdout.decode('utf-8').split('\n')
+    output = process.stdout.decode("utf-8").split("\n")
 
     encoder = output[0]
     params = dict(parse_encoder_params(output[1]))
 
-    return {
-        'file': file,
-        'encoder': encoder,
-        'params': params
-    }
+    return {"file": file, "encoder": encoder, "params": params}
 
 info = list(map(run_mediainfo, sys.argv[1:]))
 print(json.dumps(info, indent=2, sort_keys=True))
@@ -22,40 +22,42 @@ import argparse
 import os.path
 import yaml
 
-def set_parameter(device, *args):
-    if device == 'stylus':
-        device = 'HUION Huion Tablet Pen stylus'
-    elif device == 'pad':
-        device = 'HUION Huion Tablet Pad pad'
-    args = map(str, args)
-    run(['xsetwacom', 'set', device, *args], check=True)
 
-parser = argparse.ArgumentParser(description='setup huion h430p tablet')
-parser.add_argument('--area', type=str, default='')
-parser.add_argument('--screen', type=str, default='')
-parser.add_argument('preset', metavar='PRESET', type=str, nargs='?', help='a preset')
+def set_parameter(device, *args):
+    if device == "stylus":
+        device = "HUION Huion Tablet Pen stylus"
+    elif device == "pad":
+        device = "HUION Huion Tablet Pad pad"
+    args = map(str, args)
+    run(["xsetwacom", "set", device, *args], check=True)
+
+
+parser = argparse.ArgumentParser(description="setup huion h430p tablet")
+parser.add_argument("--area", type=str, default="")
+parser.add_argument("--screen", type=str, default="")
+parser.add_argument("preset", metavar="PRESET", type=str, nargs="?", help="a preset")
 args = parser.parse_args()
 
-area = '0 0 24384 15240'
-screen = ''
+area = "0 0 24384 15240"
+screen = ""
 
 if args.preset is not None:
-    with open(os.path.expanduser('~/.config/huion-tablet.yml'), 'r') as f:
+    with open(os.path.expanduser("~/.config/huion-tablet.yml"), "r") as f:
         config = yaml.load(f, Loader=yaml.SafeLoader)
-    preset = config['presets'][args.preset]
-    area = preset['area']
-    screen = preset['screen']
-    if 'buttons' in preset:
-        for device in preset['buttons']:
-            for button, mapping in preset['buttons'][device].items():
-                set_parameter(device, 'button', button, *str(mapping).split(' '))
+    preset = config["presets"][args.preset]
+    area = preset["area"]
+    screen = preset["screen"]
+    if "buttons" in preset:
+        for device in preset["buttons"]:
+            for button, mapping in preset["buttons"][device].items():
+                set_parameter(device, "button", button, *str(mapping).split(" "))
 
-if args.area != '':
+if args.area != "":
     area = args.area
-if args.screen != '':
+if args.screen != "":
     screen = args.screen
 
-area = tuple(map(int, area.split(' ')))
+area = tuple(map(int, area.split(" ")))
 
-set_parameter('stylus', 'Area', ' '.join(map(str, area)))
-set_parameter('stylus', 'MapToOutput', screen)
+set_parameter("stylus", "Area", " ".join(map(str, area)))
+set_parameter("stylus", "MapToOutput", screen)
@@ -5,39 +5,46 @@ import sys
 from pprint import pprint
 from tabulate import tabulate
 
+
 def process_raw_line(line):
-    fields = line.rstrip().split(' ')
-    fields = [field.split(':')[-1] for field in fields]
+    fields = line.rstrip().split(" ")
+    fields = [field.split(":")[-1] for field in fields]
     fields[5] = fields[5][1:-1]
     fields[0] = int(fields[0])
     fields[1:] = list(map(np.float, fields[1:]))
     return fields
 
-with open(sys.argv[1], 'r') as f:
+
+with open(sys.argv[1], "r") as f:
     data = list(map(process_raw_line, f.readlines()))
 
 df = pd.DataFrame(data)
-df.columns = ['frame', 'Y', 'U', 'V', 'All', 'dB']
-df['inv'] = [1 - value for value in df['All']]
+df.columns = ["frame", "Y", "U", "V", "All", "dB"]
+df["inv"] = [1 - value for value in df["All"]]
 
 print(f'Mean overall SSIM: {df["All"].mean()}')
 print(f'Median overall SSIM: {df["All"].median()}')
 print(f'Frame with worst SSIM: {df.idxmin()["All"]+1}')
 print(f'Frame with best SSIM: {df.idxmax()["All"]+1}')
 
-print(tabulate(
-    [(key, value * 100) for key, value in [
-        ['best', df['All'].max()],
-        [50, 1 - df['inv'].quantile(0.50)],
-        [66.6, 1 - df['inv'].quantile(0.666)],
-        [75, 1 - df['inv'].quantile(0.75)],
-        [80, 1 - df['inv'].quantile(0.80)],
-        [90, 1 - df['inv'].quantile(0.90)],
-        [95, 1 - df['inv'].quantile(0.95)],
-        [98, 1 - df['inv'].quantile(0.98)],
-        [99, 1 - df['inv'].quantile(0.99)],
-        [99.9, 1 - df['inv'].quantile(0.999)],
-        [100, df['All'].min()]
-    ]],
-    headers=['% Frames', '≥ SSIM']
-))
+print(
+    tabulate(
+        [
+            (key, value * 100)
+            for key, value in [
+                ["best", df["All"].max()],
+                [50, 1 - df["inv"].quantile(0.50)],
+                [66.6, 1 - df["inv"].quantile(0.666)],
+                [75, 1 - df["inv"].quantile(0.75)],
+                [80, 1 - df["inv"].quantile(0.80)],
+                [90, 1 - df["inv"].quantile(0.90)],
+                [95, 1 - df["inv"].quantile(0.95)],
+                [98, 1 - df["inv"].quantile(0.98)],
+                [99, 1 - df["inv"].quantile(0.99)],
+                [99.9, 1 - df["inv"].quantile(0.999)],
+                [100, df["All"].min()],
+            ]
+        ],
+        headers=["% Frames", "≥ SSIM"],
+    )
+)
@@ -3,11 +3,11 @@ import sys
 import paho.mqtt.client as mqtt
 import wordclock_credentials as creds
 
-client = mqtt.Client('wordclock.py')
+client = mqtt.Client("wordclock.py")
 
 client.username_pw_set(creds.USER, creds.PASSWORD)
 client.connect(creds.MQTT_HOST, 1883, 60)
 
-client.publish('wordclock/color/red', sys.argv[2], retain=True)
-client.publish('wordclock/color/green', sys.argv[1], retain=True)
-client.publish('wordclock/color/blue', sys.argv[3], retain=True)
+client.publish("wordclock/color/red", sys.argv[2], retain=True)
+client.publish("wordclock/color/green", sys.argv[1], retain=True)
+client.publish("wordclock/color/blue", sys.argv[3], retain=True)