Skip to content

Commit

Permalink
update musl wheels
Browse files Browse the repository at this point in the history
  • Loading branch information
KumaTea committed Mar 14, 2024
1 parent 004b1ef commit 8e7e98f
Show file tree
Hide file tree
Showing 7 changed files with 269 additions and 11 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ This project provides pre-built wheels of popular packages.
### Add to pip

```bash
pip config set global.extra-index-url https://ext.kmtea.eu/cdn
pip config set global.extra-index-url https://ext.kmtea.eu/simple
```

### Temporary use
Expand Down
8 changes: 4 additions & 4 deletions src/col_whl.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def get_release_assets(repo: str, tag: str, author: str = AUTHOR) -> list:
return result['assets']


def save_release_data(repo: str, tag: str, data: dict):
def save_release_data(repo: str, tag: str, data: list):
    """Write one release's asset list to whl/data/<repo>_<tag>.json."""
    target = f'{WORKDIR}/whl/data/{repo}_{tag}.json'
    with open(target, 'w', encoding='utf-8') as fp:
        json.dump(data, fp, indent=2)

Expand All @@ -30,11 +30,11 @@ def get_all_repo_data():
os.makedirs(f'{WORKDIR}/whl/data', exist_ok=True)

for repo in PROJECTS:
print(f'Fetching GitHub releases for {AUTHOR}/{repo}...')
print(f'\nFetching GitHub releases for {AUTHOR}/{repo}...')
tags = get_repo_release_tags(repo)
pbar = tqdm(tags)
for tag in tags:
pbar.set_description(f'Fetching {tag}...')
for tag in pbar:
pbar.set_description(f'Fetching {tag:>8}...')
assets = get_release_assets(repo, tag)
save_release_data(repo, tag, assets)

Expand Down
3 changes: 3 additions & 0 deletions src/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
PROJECTS = [
'pypy-wheels',
'riscv-wheels',
'musl-wheels',
'ext-whl',
'NextBot',
'pytorch-riscv64',
Expand All @@ -13,3 +14,5 @@
WORKDIR = '..'
else:
WORKDIR = '.'

LOCAL_WHL_DIR = r'E:\Cache\whl'
3 changes: 1 addition & 2 deletions src/gen_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,10 +105,9 @@ def gen_cdn_index():
if os.name == 'nt':
hash_dict = get_saved_hash()
wheels = get_assets(hash_dict)
ps = gen_index(wheels)
if input('Check official index? ([Y]/n) ').lower() in ['', 'y']:
from tqdm import tqdm
for p in tqdm(ps):
for p in tqdm(gen_index(wheels)):
check_official(p)
else:
wheels = get_assets_from_html()
Expand Down
10 changes: 6 additions & 4 deletions src/gen_whl.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
from conf import *
from tools import get_saved_hash, get_assets, check_dup
from tools import *


def gen_index(saved_hash: dict):
def gen_html_content(saved_hash: dict):
assets = get_assets(saved_hash)
check_dup(assets)
html = ''
Expand All @@ -23,7 +22,7 @@ def gen_index(saved_hash: dict):


def gen_html(saved_hash: dict):
index = gen_index(saved_hash)
index = gen_html_content(saved_hash)
with open(f'{WORKDIR}/whl/wheels.html', 'w', encoding='utf-8') as html_file:
html_file.write(index)

Expand All @@ -38,6 +37,9 @@ def gen_html_cdn():
if __name__ == '__main__':
if os.name == 'nt':
hash_dict = get_saved_hash()
local_whl = get_local_whl()
hash_dict = extend_hash_dict(hash_dict, local_whl)
save_hash(hash_dict)
gen_html(hash_dict)
else:
gen_html_cdn()
40 changes: 40 additions & 0 deletions src/tools.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import json
import hashlib
import logging
from conf import *
from tqdm import tqdm
Expand All @@ -12,6 +13,12 @@ def get_saved_hash():
return {}


def save_hash(saved_hash: dict):
    """Persist the wheel hash table, ordered case-insensitively by filename."""
    # Rebuild the dict in sorted key order so the JSON output is stable.
    ordered = {key: saved_hash[key] for key in sorted(saved_hash, key=str.lower)}
    with open(f'{WORKDIR}/whl/data/sha256sums.json', 'w', encoding='utf-8') as fp:
        json.dump(ordered, fp, indent=2)


def get_whl_sha256(name: str, saved_hash: dict):
if name in saved_hash:
return saved_hash[name]['sha256']
Expand Down Expand Up @@ -70,3 +77,36 @@ def get_assets_from_html() -> list:
pkgs.append({'name': pkg_filename, 'url': pkg_url})

return pkgs


def calculate_hash(file: str, algorithm: str = 'sha256') -> str:
    """
    Compute the hex digest of a file's contents.

    Reads the file in fixed-size chunks so memory stays bounded even for
    large wheel files (previously the whole file was read in one call).

    :param file: path of the file to hash
    :param algorithm: any algorithm name accepted by hashlib.new()
    :return: lowercase hexadecimal digest string
    """
    hash_obj = hashlib.new(algorithm)
    with open(file, 'rb') as f:
        # 1 MiB per read: constant memory regardless of file size
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            hash_obj.update(chunk)
    return hash_obj.hexdigest()


def get_local_whl(whl_dir: 'str | None' = None) -> list[tuple[str, str]]:
    """
    Recursively collect all .whl files under a directory.

    :param whl_dir: directory to scan; defaults to LOCAL_WHL_DIR from conf,
        preserving the original behavior when called with no arguments
    :return: list of tuples (filename, full path)
    """
    if whl_dir is None:
        whl_dir = LOCAL_WHL_DIR
    whl_files = []
    for root, _dirs, files in os.walk(whl_dir):
        for file in files:
            if file.endswith('.whl'):
                whl_files.append((file, os.path.join(root, file)))
    return whl_files


def extend_hash_dict(saved_hash: dict, whl_files: list[tuple[str, str]]) -> dict:
    """
    Add local wheels to the saved hash table (mutates and returns it).

    :param saved_hash: mapping of wheel filename -> {'sha256': ..., 'verify': ...}
    :param whl_files: list of (filename, path) tuples, e.g. from get_local_whl()
    :return: the same dict, extended with one entry per local wheel
    :raises ValueError: if any local wheel is already present in saved_hash
    """
    # Explicit raise instead of `assert`: asserts are stripped under
    # `python -O`, and the error now names the offending wheels.
    duplicates = [name for name, _ in whl_files if name in saved_hash]
    if duplicates:
        raise ValueError(f'wheels already recorded in hash table: {duplicates}')

    print('Calculating hash for local wheels...')
    for name, path in tqdm(whl_files):
        saved_hash[name] = {
            'sha256': calculate_hash(path),
            # local entries have not been checked against an official index yet
            'verify': False
        }
    return saved_hash
Loading

0 comments on commit 8e7e98f

Please sign in to comment.