From 3aafd33bb3509d3e070807b2f268bd2829b6df6e Mon Sep 17 00:00:00 2001
From: bb010g
Date: Tue, 30 Jul 2019 04:54:53 -0700
Subject: [PATCH] Create Nix package expressions

Hopefully these will get hooked up to CI later. Slight historical value
included free!
---
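Usage notes (this sits after the "---" cut line, so `git am` ignores it): a
rough sketch of how these expressions can be exercised. The attribute and file
names are the ones introduced below; the commands assume they run from the
repository root with <nixpkgs> resolving to a recent nixpkgs. The second
command adapts the `withPackages` example from the default.nix comments, and
the last one shows how a NUR-style CI job would consume nix/ci.nix.

    # Python shell with the in-tree development build:
    nix-shell nix/extended.nix -A telethon-devel-python37 --run "python"

    # Shell with a pinned release, via the package overrides:
    nix-shell -E 'with import <nixpkgs> { };
      ((python3.override {
        packageOverrides = (import ./. { }).pythonPackageOverrides;
      }).withPackages (ps: [ ps.telethon_1_4_3 ])).env'

    # Everything CI is expected to build and cache:
    nix-build nix/ci.nix -A buildOutputs
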
 .gitignore                                    |  20 +-
 default.nix                                   | 123 +++
 nix/ci.nix                                    |  59 ++
 nix/extended.nix                              |  83 ++
 nix/overlay.nix                               |  18 +
 nix/telethon/1.4.nix                          |  56 ++
 nix/telethon/1.5.nix                          |  60 ++
 nix/telethon/1.6.nix                          |  50 ++
 nix/telethon/1.7.nix                          |  66 ++
 nix/telethon/1.8.nix                          |  35 +
 nix/telethon/1.9.nix                          |  35 +
 nix/telethon/common.nix                       |  60 ++
 nix/telethon/devel.nix                        |  27 +
 .../generator-use-pathlib-to-1_4_3.patch      | 819 ++++++++++++++++++
 14 files changed, 1503 insertions(+), 8 deletions(-)
 create mode 100644 default.nix
 create mode 100644 nix/ci.nix
 create mode 100644 nix/extended.nix
 create mode 100644 nix/overlay.nix
 create mode 100644 nix/telethon/1.4.nix
 create mode 100644 nix/telethon/1.5.nix
 create mode 100644 nix/telethon/1.6.nix
 create mode 100644 nix/telethon/1.7.nix
 create mode 100644 nix/telethon/1.8.nix
 create mode 100644 nix/telethon/1.9.nix
 create mode 100644 nix/telethon/common.nix
 create mode 100644 nix/telethon/devel.nix
 create mode 100644 nix/telethon/generator-use-pathlib-to-1_4_3.patch

diff --git a/.gitignore b/.gitignore
index a40abb8a..4d8f9baa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,13 +1,13 @@
 # Docs
-_build/
-docs/
+/_build/
+/docs/
 
 # Generated code
-telethon/tl/functions/
-telethon/tl/types/
-telethon/tl/patched/
-telethon/tl/alltlobjects.py
-telethon/errors/rpcerrorlist.py
+/telethon/tl/functions/
+/telethon/tl/types/
+/telethon/tl/patched/
+/telethon/tl/alltlobjects.py
+/telethon/errors/rpcerrorlist.py
 
 # User session
 *.session
@@ -79,7 +79,7 @@ instance/
 .scrapy
 
 # Sphinx documentation
-docs/_build/
+/docs/_build/
 
 # PyBuilder
 target/
@@ -106,3 +106,7 @@ ENV/
 
 # Rope project settings
 .ropeproject
+
+# Nix build results
+result
+result-*
diff --git a/default.nix b/default.nix
new file mode 100644
index 00000000..5685a681
--- /dev/null
+++ b/default.nix
@@ -0,0 +1,123 @@
+# A NUR-compatible package specification.
+{ pkgs ? import <nixpkgs> {}, useRelease ? true }:
+
+rec {
+  # The `lib`, `modules`, and `overlay` names are special
+  lib = ({ pkgs }: { }) { inherit pkgs; }; # functions
+  modules = { }; # NixOS modules
+  overlays = { }; # nixpkgs overlays
+
+  # # development
+
+  # ## development.python-modules
+
+  # use in a shell like
+  # ```nix
+  # ((pkgs.python3.override {
+  #   packageOverrides = pythonPackageOverrides;
+  # }).withPackages (ps: [ ps.telethon ])).env
+  # ```
+  pythonPackageOverrides = self: super: let
+    defaultTelethonArgs = { inherit useRelease; };
+    telethonPkg = v: args: self.callPackage (./nix/telethon + "/${v}.nix")
+      (defaultTelethonArgs // args);
+  in rec {
+    telethon = telethon_1;
+    telethon-devel = self.callPackage ./nix/telethon/devel.nix { };
+
+    telethon_1 = telethon_1_9;
+    telethon_1_9 = telethon_1_9_0;
+    telethon_1_9_0 = telethonPkg "1.9" { version = "1.9.0"; };
+    telethon_1_8 = telethon_1_8_0;
+    telethon_1_8_0 = telethonPkg "1.8" { version = "1.8.0"; };
+    telethon_1_7 = telethon_1_7_7;
+    telethon_1_7_7 = telethonPkg "1.7" { version = "1.7.7"; };
+    telethon_1_7_6 = telethonPkg "1.7" { version = "1.7.6"; };
+    telethon_1_7_5 = telethonPkg "1.7" { version = "1.7.5"; };
+    telethon_1_7_4 = telethonPkg "1.7" { version = "1.7.4"; };
+    telethon_1_7_3 = telethonPkg "1.7" { version = "1.7.3"; };
+    telethon_1_7_2 = telethonPkg "1.7" { version = "1.7.2"; };
+    telethon_1_7_1 = telethonPkg "1.7" { version = "1.7.1"; };
+    telethon_1_7_0 = telethonPkg "1.7" { version = "1.7.0"; };
+    telethon_1_6 = telethon_1_6_2;
+    telethon_1_6_2 = telethonPkg "1.6" { version = "1.6.2"; };
+    # 1.6.1.post1: hotpatch that fixed Telethon.egg-info dir perms
+    telethon_1_6_1 = telethonPkg "1.6" { version = "1.6.1"; };
+    telethon_1_6_0 = telethonPkg "1.6" { version = "1.6.0"; };
+    telethon_1_5 = telethon_1_5_5;
+    telethon_1_5_5 = telethonPkg "1.5" { version = "1.5.5"; };
+    telethon_1_5_4 = telethonPkg "1.5" { version = "1.5.4"; };
+    telethon_1_5_3 = telethonPkg "1.5" { version = "1.5.3"; };
+    telethon_1_5_2 = telethonPkg "1.5" { version = "1.5.2"; };
+    telethon_1_5_1 = telethonPkg "1.5" { version = "1.5.1"; };
+    telethon_1_5_0 = telethonPkg "1.5" { version = "1.5.0"; };
+    telethon_1_4 = telethon_1_4_3;
+    telethon_1_4_3 = telethonPkg "1.4" { version = "1.4.3"; };
+    telethon_1_4_2 = telethonPkg "1.4" { version = "1.4.2"; };
+    telethon_1_4_1 = telethonPkg "1.4" { version = "1.4.1"; };
+    telethon_1_4_0 = telethonPkg "1.4" { version = "1.4.0"; };
+    #telethon_1_3_0
+    #telethon_1_2_0
+    #telethon_1_1_1
+    #telethon_1_1_0
+    #telethon_1_0_4
+    #telethon_1_0_3
+    #telethon_1_0_2
+    #telethon_1_0_1
+    #telethon_1_0_0-rc1
+    #telethon_1_0_0
+    #telethon_0_19_1
+    #telethon_0_19_0
+    #telethon_0_18_3
+    #telethon_0_18_2
+    #telethon_0_18_1
+    #telethon_0_18_0
+    #telethon_0_17_4
+    #telethon_0_17_3
+    #telethon_0_17_2
+    #telethon_0_17_1
+    #telethon_0_17_0
+    #telethon_0_16_2
+    #telethon_0_16_1
+    #telethon_0_16_0
+    #telethon_0_15_5
+    #telethon_0_15_4
+    #telethon_0_15_3
+    #telethon_0_15_2
+    #telethon_0_15_1
+    #telethon_0_15_0
+    #telethon_0_14_2
+    #telethon_0_14_1
+    #telethon_0_14_0
+    #telethon_0_13_6
+    #telethon_0_13_5
+    #telethon_0_13_4
+    #telethon_0_13_3
+    #telethon_0_13_2
+    #telethon_0_13_1
+    #telethon_0_13_0
+    #telethon_0_12_2
+    #telethon_0_12_1
+    #telethon_0_12_0
+    #telethon_0_11_5
+    #telethon_0_11_4
+    #telethon_0_11_3
+    #telethon_0_11_2
+    #telethon_0_11_1
+    #telethon_0_11_0
+    #telethon_0_10_1
+    #telethon_0_10_0
+    #telethon_0_9_1
+    #telethon_0_9_0
+    #telethon_0_8_0
+    #telethon_0_7_1
+    #telethon_0_7_0
+    #telethon_0_6_0
+    #telethon_0_5_0
+    #telethon_0_4_0
+    #telethon_0_3_0
+    #telethon_0_2_0
+    #telethon_0_1_0
+  };
+}
+
diff --git a/nix/ci.nix b/nix/ci.nix
new file mode 100644
index 00000000..fe6bda54
--- /dev/null
+++ b/nix/ci.nix
@@ -0,0 +1,59 @@
+# This file provides all the buildable and cacheable packages and
+# package outputs in your package set. These are what gets built by CI,
+# so if you correctly mark packages as
+#
+# - broken (using `meta.broken`),
+# - unfree (using `meta.license.free`), and
+# - locally built (using `preferLocalBuild`)
+#
+# then your CI will be able to build and cache only those packages for
+# which this is possible.
+
+{ pkgs ? import <nixpkgs> {}, enableEnvs ? false }:
+
+with builtins;
+
+let
+
+  isReserved = n: n == "lib" || n == "overlays" || n == "modules";
+  isDerivation = p: isAttrs p && p ? type && p.type == "derivation";
+  isBuildable = p: !(p.meta.broken or false) && p.meta.license.free or true;
+  isCacheable = p: !(p.preferLocalBuild or false);
+  shouldRecurseForDerivations = p:
+    isAttrs p && p.recurseForDerivations or false;
+
+  nameValuePair = n: v: { name = n; value = v; };
+
+  concatMap = builtins.concatMap or (f: xs: concatLists (map f xs));
+
+  flattenPkgs = s:
+    let
+      f = p:
+        if shouldRecurseForDerivations p then flattenPkgs p
+        else if isDerivation p then [p]
+        else [];
+    in
+      concatMap f (attrValues s);
+
+  outputsOf = p: map (o: p.${o}) p.outputs;
+
+  # build & test packages across Python versions
+  # (withPackages "distributions" are also generated for testing)
+  nurAttrs = import ./extended.nix { inherit pkgs enableEnvs; };
+
+  nurPkgs =
+    flattenPkgs
+      (listToAttrs
+        (map (n: nameValuePair n nurAttrs.${n})
+          (filter (n: !isReserved n)
+            (attrNames nurAttrs))));
+
+in
+
+rec {
+  buildPkgs = filter isBuildable nurPkgs;
+  cachePkgs = filter isCacheable buildPkgs;
+
+  buildOutputs = concatMap outputsOf buildPkgs;
+  cacheOutputs = concatMap outputsOf cachePkgs;
+}
diff --git a/nix/extended.nix b/nix/extended.nix
new file mode 100644
index 00000000..01a087ed
--- /dev/null
+++ b/nix/extended.nix
@@ -0,0 +1,83 @@
+{ pkgs ? import <nixpkgs> { }, enableEnvs ? true, useRelease ? true }:
+
+# packages built against all Python versions (along with withPackages
+# environments for testing)
+
+# to use for testing, you'll probably want a variant of:
+# ```sh
+# nix-shell nix/extended.nix -A telethon-devel-python37 --run "python"
+# ```
+
+let
+  inherit (pkgs.lib) attrNames attrValues concatMap head listToAttrs
+    mapAttrsToList optional optionals tail;
+  nurAttrs = import ../default.nix { inherit pkgs useRelease; };
+
+  pyVersions = concatMap (n: optional (pkgs ? ${n}) n) [
+    "python3"
+    "python35"
+    "python36"
+    "python37"
+    # "pypy3"
+    # "pypy35"
+    # "pypy36"
+    # "pypy37"
+  ];
+
+  pyPkgEnvs = [
+    [ "telethon" "telethon" ]
+    [ "telethon-devel" "telethon-devel" ]
+
+    [ "telethon_1" "telethon_1" ]
+    [ "telethon_1_9" "telethon_1_9" ]
+    [ "telethon_1_9_0" "telethon_1_9_0" ]
+    [ "telethon_1_8" "telethon_1_8" ]
+    [ "telethon_1_8_0" "telethon_1_8_0" ]
+    [ "telethon_1_7" "telethon_1_7" ]
+    [ "telethon_1_7_7" "telethon_1_7_7" ]
+    [ "telethon_1_7_6" "telethon_1_7_6" ]
+    [ "telethon_1_7_5" "telethon_1_7_5" ]
+    [ "telethon_1_7_4" "telethon_1_7_4" ]
+    [ "telethon_1_7_3" "telethon_1_7_3" ]
+    [ "telethon_1_7_2" "telethon_1_7_2" ]
+    [ "telethon_1_7_1" "telethon_1_7_1" ]
+    [ "telethon_1_7_0" "telethon_1_7_0" ]
+    [ "telethon_1_6" "telethon_1_6" ]
+    [ "telethon_1_6_2" "telethon_1_6_2" ]
+    [ "telethon_1_6_1" "telethon_1_6_1" ]
+    [ "telethon_1_6_0" "telethon_1_6_0" ]
+    [ "telethon_1_5" "telethon_1_5" ]
+    [ "telethon_1_5_5" "telethon_1_5_5" ]
+    [ "telethon_1_5_4" "telethon_1_5_4" ]
+    [ "telethon_1_5_3" "telethon_1_5_3" ]
+    [ "telethon_1_5_2" "telethon_1_5_2" ]
+    [ "telethon_1_5_1" "telethon_1_5_1" ]
+    [ "telethon_1_5_0" "telethon_1_5_0" ]
+    [ "telethon_1_4" "telethon_1_4" ]
+    [ "telethon_1_4_3" "telethon_1_4_3" ]
+    # [ "telethon_1_4_2" "telethon_1_4_2" ]
+    # [ "telethon_1_4_1" "telethon_1_4_1" ]
+    # [ "telethon_1_4_0" "telethon_1_4_0" ]
+  ];
+
+  getPkgPair = pkgs: n: let p = pkgs.${n}; in { name = n; value = p; };
+  getPkgPairs = pkgs: map (getPkgPair pkgs);
+  pyPkgPairs = py:
+    concatMap (d: map (getPkgPair py.pkgs) (tail d)) pyPkgEnvs;
+  pyPkgEnvPair = pyNm: py: envNm: env: {
+    name = "${envNm}-env-${pyNm}";
+    value = (py.withPackages (ps: map (pn: ps.${pn}) env)).overrideAttrs (o: {
+      name = "${envNm}-${py.name}-env";
+      preferLocalBuild = true;
+    });
+  };
+  pyNurPairs = pyNm: py:
+    map ({ name, value }: { name = "${name}-${pyNm}"; inherit value; })
+      (pyPkgPairs py) ++
+    optionals enableEnvs
+      (map (d: pyPkgEnvPair pyNm py (head d) (tail d)) pyPkgEnvs);
+in nurAttrs // (listToAttrs (concatMap (py: let
+    python = pkgs.${py}.override {
+      packageOverrides = nurAttrs.pythonPackageOverrides;
+    }; in
+  pyNurPairs py python) pyVersions))
diff --git a/nix/overlay.nix b/nix/overlay.nix
new file mode 100644
index 00000000..122729de
--- /dev/null
+++ b/nix/overlay.nix
@@ -0,0 +1,18 @@
+# You can use this file as a nixpkgs overlay. This is useful in the
+# case where you don't want to add the whole NUR namespace to your
+# configuration.
+
+self: super:
+
+let
+
+  isReserved = n: n == "lib" || n == "overlays" || n == "modules";
+  nameValuePair = n: v: { name = n; value = v; };
+  nurAttrs = import ../default.nix { pkgs = super; };
+
+in
+
+  builtins.listToAttrs
+    (map (n: nameValuePair n nurAttrs.${n})
+      (builtins.filter (n: !isReserved n)
+        (builtins.attrNames nurAttrs)))
diff --git a/nix/telethon/1.4.nix b/nix/telethon/1.4.nix
new file mode 100644
index 00000000..c2eceb7c
--- /dev/null
+++ b/nix/telethon/1.4.nix
@@ -0,0 +1,56 @@
+{ lib, buildPythonPackage, pythonOlder
+, fetchFromGitHub ? null, fetchPypi ? null, fetchpatch ? null
+, async_generator, pyaes, rsa
+, version
+, useRelease ?
true +}: + +assert useRelease -> fetchPypi != null; +assert !useRelease -> fetchFromGitHub != null && fetchpatch != null; +let + common = import ./common.nix { + inherit lib fetchFromGitHub fetchPypi fetchpatch; + }; + versions = { + "1.4.3" = { + pypiSha256 = "1igslvhd743qy9p4kfs7lg09s8d5vhn9jhzngpv12797569p4lcj"; + sourceSha256 = "19vz0ppk7lq1dmqzf47n6h023i08pqvcwnixvm28vrijykq0z315"; + }; + "1.4.2" = { + pypiSha256 = "1f4ncyfzqj4b6zib0417r01pgnd0hb1p4aiinhlkxkmk7vy5fqfy"; + sourceSha256 = "0rsbz5kqp0d10gasadir3mgalc9aqq4fcv8xa1p7fg263f43rjl4"; + }; + "1.4.1" = { + pypiSha256 = "1n0jhdqflinyamzy5krnww7hc0s7pw9yfck1p7816pdbgir74qsw"; + sourceSha256 = "07q48gw4ry3wf9yzi6kf8lw3b23a0dvk9r8sabpxwrlqy7gnksxx"; + }; + "1.4.0" = { + version = "1.4"; + pypiSha256 = "1g7rznwmj87n9k86zby9i75h570hm84izrv0srhsmxi52pjan1ml"; + sourceSha256 = "14nv86yrj01wmlj5cfg6iq5w03ssl67av1arfy9mq1935mly5nly"; + }; + }; +in buildPythonPackage rec { + pname = "telethon"; + inherit version; + + src = common.fetchTelethon { + inherit useRelease version; + versionData = versions.${version}; + }; + patches = lib.optionals (!useRelease) [ + (if (lib.versionOlder version "1.4.3") then + common.patches.generator-use-pathlib-to-1_4_3 + else + common.patches.generator-use-pathlib-from-1_4_3-to-1_5_0) + common.patches.generator-use-pathlib-open-to-1_5_3 + common.patches.sort-generated-tlobjects-to-1_7_1 + ]; + + propagatedBuildInputs = [ async_generator rsa pyaes ]; + + doCheck = false; # No tests available + + disabled = pythonOlder "3.5"; + meta = common.meta; +} diff --git a/nix/telethon/1.5.nix b/nix/telethon/1.5.nix new file mode 100644 index 00000000..3d376da0 --- /dev/null +++ b/nix/telethon/1.5.nix @@ -0,0 +1,60 @@ +{ lib, buildPythonPackage, pythonOlder +, fetchFromGitHub ? null, fetchPypi ? null, fetchpatch ? null +, async_generator, pyaes, rsa +, version +, useRelease ? 
true +}: + +assert useRelease -> fetchPypi != null; +assert !useRelease -> fetchFromGitHub != null && fetchpatch != null; +let + common = import ./common.nix { + inherit lib fetchFromGitHub fetchPypi fetchpatch; + }; + versions = { + "1.5.5" = { + pypiSha256 = "1qpc4vc3lidhlp1c7521nxizjr6y5c3l9x41knqv02x8n3l9knxa"; + sourceSha256 = "1x5niscjbrg5a0cg261z6awln57v3nn8si5j58vhsnckws2c48a5"; + }; + "1.5.4" = { + pypiSha256 = "1kjqi3wy4hswsf3vmrjg7z5c3f9wpdfk4wz1yfsqmj9ppwllkjsj"; + sourceSha256 = "0rmp9zk7a354nb39c01mjcrhi2j6v9im40xmdcvmizx990vlv476"; + }; + "1.5.3" = { + pypiSha256 = "11xd5ni0chzsfny0vwwqyh37mvmrwrk2bmkhwp1ipbxyis8jjjia"; + sourceSha256 = "1l3i6wx3fgcy3vmr75qdbv5fvc5qnk0j47hv7jszsqq9rvqvz2xs"; + }; + "1.5.2" = { + pypiSha256 = "0ymv6l9xn41sgpkilqkivwbjna89m43i0a728lak2cppp7i1i1h7"; + sourceSha256 = "0gnqvlhh3qyvibl7icn6774rshlx1nnhb5f78609da44743lyv17"; + }; + "1.5.1" = { + pypiSha256 = "1ypxpsfj814gzln4fl7z17l1l6q0bzd5p1ivas85yim3a992ixww"; + sourceSha256 = "15w5nshvmj8hgqdcbpw0fjcf1cspaci8dldm9ml1pmijw7zgmpdg"; + }; + "1.5.0" = { + version = "1.5"; + pypiSha256 = "1kzkzcxyz7adjzvm2ml9faz2c5yx469j211yvi5xfvjwp58ic2jc"; + sourceSha256 = "12232d3xfv0bbykk9xaxpxsr3656ywjx4ra1q5q99rpp6wv438n1"; + }; + }; +in buildPythonPackage rec { + pname = "telethon"; + inherit version; + + src = common.fetchTelethon { + inherit useRelease version; + versionData = versions.${version}; + }; + patches = lib.optionals (!useRelease) ([ + common.patches.sort-generated-tlobjects-to-1_7_1 + ] ++ lib.optional (lib.versionOlder version "1.5.3") + common.patches.generator-use-pathlib-open-to-1_5_3); + + propagatedBuildInputs = [ async_generator rsa pyaes ]; + + doCheck = false; # No tests available + + disabled = pythonOlder "3.5"; + meta = common.meta; +} diff --git a/nix/telethon/1.6.nix b/nix/telethon/1.6.nix new file mode 100644 index 00000000..b8033194 --- /dev/null +++ b/nix/telethon/1.6.nix @@ -0,0 +1,50 @@ +{ lib, buildPythonPackage, pythonOlder +, fetchFromGitHub ? null, fetchPypi ? null, fetchpatch ? null +, pyaes, rsa +, version +, useRelease ? 
true +}: + +assert useRelease -> fetchPypi != null; +assert !useRelease -> fetchFromGitHub != null && fetchpatch != null; +let + common = import ./common.nix { + inherit lib fetchFromGitHub fetchPypi fetchpatch; + }; + versions = { + "1.6.2" = { + pypiSha256 = "074h5gj0c330rb1nxzpqm31fp1vw7calh1cdkapbjx90j769iz18"; + sourceSha256 = "1daqlb4sva5qkljzbjr8xvjfgp7bdcrl2li1i4434za6a0isgd3j"; + }; + "1.6.1" = { + # hotpatch with missing .pyc files and fixed Telethon.egg-info perms + pypiVersion = "1.6.1.post1"; + pypiSha256 = "17s1qp69bbj6jniam9wbcpaj60ah56sjw0q3kr8ca28y17s88si7"; + # pypiVersion = "1.6.1"; + # pypiSha256 = "036lhr1jr79np74c6ih51c4pjy828r3lvwcq07q5wynyjprm1qbz"; + sourceSha256 = "1hk1bpnk51rpsifb67s31c2qph5hmw28i2vgh97i4i56vynx2yxz"; + }; + "1.6.0" = { + version = "1.6"; + pypiSha256 = "06prmld9068zcm9rfmq3rpq1szw72c6dkxl62b035i9w8wdpvg0m"; + sourceSha256 = "0qk14mrnvv9a043ik0y2w6q97l83abvbvn441zn2jl00w4ykfqrh"; + }; + }; +in buildPythonPackage rec { + pname = "telethon"; + inherit version; + + src = common.fetchTelethon { + inherit useRelease version; + versionData = versions.${version}; + }; + patches = lib.optional (!useRelease) + common.patches.sort-generated-tlobjects-to-1_7_1; + + propagatedBuildInputs = [ rsa pyaes ]; + + doCheck = false; # No tests available + + disabled = pythonOlder "3.5"; + meta = common.meta; +} diff --git a/nix/telethon/1.7.nix b/nix/telethon/1.7.nix new file mode 100644 index 00000000..28768111 --- /dev/null +++ b/nix/telethon/1.7.nix @@ -0,0 +1,66 @@ +{ lib, buildPythonPackage, pythonOlder +, fetchFromGitHub ? null, fetchPypi ? null, fetchpatch ? null +, pyaes, rsa +, version +, useRelease ? true +}: + +assert useRelease -> fetchPypi != null; +assert !useRelease -> fetchFromGitHub != null; +let + common = import ./common.nix { + inherit lib fetchFromGitHub fetchPypi fetchpatch; + }; + versions = { + "1.7.7" = { + pypiSha256 = "0mgpihjc7g4gfrq57srripdavxbsgivn4qsjanv3yds5drskciv0"; + sourceSha256 = "08c3iakd7fyacc79pg8hyzpa6zx3gbp7xivi10af34zj775lp2pi"; + }; + "1.7.6" = { + pypiSha256 = "192xda98685s3hmz7ircxpsn7yq913y0r1kmqrsav90m4g4djn4j"; + sourceSha256 = "1ss2pfpd3hby25g9ighbr7ccp66awfzda4srsnvr9s6i28har6ag"; + }; + "1.7.5" = { + pypiSha256 = "0i5s7ahicw5k0s1i7pi26vc6rp6ppr1gr848sa61yh3qqa4c0qnr"; + sourceSha256 = "1rssh0l466h9y6v0z095c9aa63nz9im7gg5771jjj5w70mkpm5w6"; + }; + "1.7.4" = { + pypiSha256 = "1qpc9f1y559zdwz59qqz4hbf1mrynjjbcg357nzaa2x5a2q4lz0s"; + sourceSha256 = "1q43lwfp67q4skfcrb6sdlnjw4ajrpizf08fd9wjrw521kkd8g4y"; + }; + "1.7.3" = { + pypiSha256 = "0s8qmsarlfgpb0k3w50siv354hpa7b1dnrjjd0iqz7vc5bc7ni84"; + sourceSha256 = "0c393smp1qm8kk39r0k31p74p89qzvjdjxq4bxq75h07a1yqbs8x"; + }; + "1.7.2" = { + pypiSha256 = "0465dwikhpbka2sj1g952rac03jkixq497gbmmyx2i9xb594db27"; + sourceSha256 = "1gw09zbaqvn074skwjhmm4yp8p75rw9njwjbkcfvqb4gr6dg8wpq"; + }; + "1.7.1" = { + pypiSha256 = "186z6imf7zqy8vf4yv2w2kxpd7lxmfppa1qi8nxjdgq8rz7wbglf"; + sourceSha256 = "05mpqfj4w5qxyl1ai5p0f31pkagz55xxh8060r8y9i3d44j9bn1c"; + }; + "1.7.0" = { + version = "1.7"; + pypiSha256 = "06cqb121k2y0h3x7gvckyvbsn97wc1a25pghinxz2vb7vg8wwxvw"; + sourceSha256 = "0myx32hqax71ijfw6ksxvk27cb6x06kbz8jb7ib9d1cayr2viir6"; + }; + }; +in buildPythonPackage rec { + pname = "telethon"; + inherit version; + + src = common.fetchTelethon { + inherit useRelease version; + versionData = versions.${version}; + }; + patches = lib.optional (!useRelease && lib.versionOlder version "1.7.1") + common.patches.sort-generated-tlobjects-to-1_7_1; + + propagatedBuildInputs = [ rsa pyaes ]; + + 
doCheck = false; # No tests available + + disabled = pythonOlder "3.5"; + meta = common.meta; +} diff --git a/nix/telethon/1.8.nix b/nix/telethon/1.8.nix new file mode 100644 index 00000000..9f52156b --- /dev/null +++ b/nix/telethon/1.8.nix @@ -0,0 +1,35 @@ +{ lib, buildPythonPackage, pythonOlder +, fetchFromGitHub ? null, fetchPypi ? null, fetchpatch ? null +, pyaes, rsa +, version +, useRelease ? true +}: + +assert useRelease -> fetchPypi != null; +assert !useRelease -> fetchFromGitHub != null; +let + common = import ./common.nix { + inherit lib fetchFromGitHub fetchPypi fetchpatch; + }; + versions = { + "1.8.0" = { + pypiSha256 = "099br8ldjrfzwipv7g202lnjghmqj79j6gicgx11s0vawb5mb3vf"; + sourceSha256 = "1q5mcijmjw2m2v3ilw28xnavmcdck5md0k98kwnz0kyx4iqckcv0"; + }; + }; +in buildPythonPackage rec { + pname = "telethon"; + inherit version; + + src = common.fetchTelethon { + inherit useRelease version; + versionData = versions.${version}; + }; + + propagatedBuildInputs = [ rsa pyaes ]; + + doCheck = false; # No tests available + + disabled = pythonOlder "3.5"; + meta = common.meta; +} diff --git a/nix/telethon/1.9.nix b/nix/telethon/1.9.nix new file mode 100644 index 00000000..04ada22e --- /dev/null +++ b/nix/telethon/1.9.nix @@ -0,0 +1,35 @@ +{ lib, buildPythonPackage, pythonOlder +, fetchFromGitHub ? null, fetchPypi ? null, fetchpatch ? null +, pyaes, rsa +, version +, useRelease ? true +}: + +assert useRelease -> fetchPypi != null; +assert !useRelease -> fetchFromGitHub != null; +let + common = import ./common.nix { + inherit lib fetchFromGitHub fetchPypi fetchpatch; + }; + versions = { + "1.9.0" = { + pypiSha256 = "1p4y4qd1ndzi1lg4fhnvq1rqz7611yrwnwwvzh63aazfpzaplyd8"; + sourceSha256 = "1g6khxc7mvm3q8rqksw9dwn4l2w8wzvr3zb74n2lb7g5ilpxsadd"; + }; + }; +in buildPythonPackage rec { + pname = "telethon"; + inherit version; + + src = common.fetchTelethon { + inherit useRelease version; + versionData = versions.${version}; + }; + + propagatedBuildInputs = [ rsa pyaes ]; + + doCheck = false; # No tests available + + disabled = pythonOlder "3.5"; + meta = common.meta; +} diff --git a/nix/telethon/common.nix b/nix/telethon/common.nix new file mode 100644 index 00000000..96b115c4 --- /dev/null +++ b/nix/telethon/common.nix @@ -0,0 +1,60 @@ +{ lib, fetchFromGitHub ? null, fetchPypi ? null, fetchpatch ? null }: + +rec { + fetchTelethon = { useRelease, version, versionData }: + if useRelease then assert versionData.pypiSha256 != null; fetchPypi { + pname = "Telethon"; + version = versionData.pypiVersion or (versionData.version or version); + sha256 = versionData.pypiSha256; + } else assert versionData.sourceSha256 != null; fetchFromGitHub { + owner = "LonamiWebs"; + repo = "Telethon"; + rev = versionData.rev or "v${versionData.version or version}"; + sha256 = versionData.sourceSha256; + }; + + fetchpatchTelethon = { rev, ... 
} @ args: + fetchpatch ({ + url = "https://github.com/LonamiWebs/Telethon/commit/${rev}.patch"; + } // (builtins.removeAttrs args [ "rev" ])); + + # sorted by name, then by logical version range + patches = rec { + generator-use-pathlib-to-1_4_3 = ./generator-use-pathlib-to-1_4_3.patch; + generator-use-pathlib-from-1_4_3-to-1_5_0 = [ + (fetchpatchTelethon { + rev = "e71c556ca71aec11166dc66f949a05e700aeb24f"; + sha256 = "058phfaggf22j0cjpy9j17y63zgd9m8j4qf7ldsg0jqm1vrym76w"; + }) + (fetchpatchTelethon { + rev = "8224e5aabf18bb31c6af8c460c38ced11756f080"; + sha256 = "0x3xfkld4d2kc0a1a8ldxy85pi57zaipq3b401b16r6rzbi4sh1j"; + }) + (fetchpatchTelethon { + rev = "aefa429236d28ae68bec4e4ef9f12d13f647dfe6"; + sha256 = "043hks8hg5sli1amfv5453h831nwy4dgyw8xr4xxfaxh74754icx"; + }) + ]; + generator-use-pathlib-open-to-1_5_3 = fetchpatchTelethon { + rev = "b57e3e3e0a752903fe7d539fb87787ec6712a3d9"; + sha256 = "1rl3lkwfi3h62ppzglrmz13zfai8i8cchzqgbjccr4l7nzh1n6nq"; + }; + sort-generated-tlobjects-to-1_7_1 = fetchpatchTelethon { + rev = "08f8aa3c526c043c107ec1b489b89c011555722f"; + sha256 = "1lkvvjzhm9jfrxpm4hbvvysz5f3qi0v4f7vqnfmrzawl73s8qk80"; + }; + }; + + meta = let inherit (lib) licenses maintainers; in { + description = "Full-featured Telegram client library for Python 3"; + fullDescription = '' + Telegram is a popular messaging application. This library is meant to + make it easy for you to write Python programs that can interact with + Telegram. Think of it as a wrapper that has already done the heavy job + for you, so you can focus on developing an application. + ''; + homepage = https://github.com/LonamiWebs/Telethon; + license = licenses.mit; + maintainers = [ maintainers.bb010g maintainers.nyanloutre ]; + }; +} diff --git a/nix/telethon/devel.nix b/nix/telethon/devel.nix new file mode 100644 index 00000000..8d5014b1 --- /dev/null +++ b/nix/telethon/devel.nix @@ -0,0 +1,27 @@ +{ lib, buildPythonPackage, nix-gitignore, pythonOlder +, async_generator, pyaes, rsa +}: + +let + common = import ./common.nix { inherit lib; }; +in buildPythonPackage rec { + pname = "telethon"; + # If pinning to a specific commit, use the following output instead: + # ```sh + # TZ=UTC git show -s --format=format:%cd --date=short-local + # ``` + version = "HEAD"; + + src = nix-gitignore.gitignoreSource '' + /.git + /default.nix + /nix + '' ../..; + + propagatedBuildInputs = [ async_generator rsa pyaes ]; + + doCheck = false; # No tests available + + disabled = pythonOlder "3.5"; + meta = common.meta; +} diff --git a/nix/telethon/generator-use-pathlib-to-1_4_3.patch b/nix/telethon/generator-use-pathlib-to-1_4_3.patch new file mode 100644 index 00000000..ec69338d --- /dev/null +++ b/nix/telethon/generator-use-pathlib-to-1_4_3.patch @@ -0,0 +1,819 @@ +--- a/setup.py ++++ b/setup.py +@@ -12,10 +12,11 @@ + + import itertools + import json +-import os + import re + import shutil +-from codecs import open ++from os import chdir ++from pathlib import Path ++from subprocess import run + from sys import argv + + from setuptools import find_packages, setup +@@ -29,30 +30,29 @@ + self.original = None + + def __enter__(self): +- self.original = os.path.abspath(os.path.curdir) +- os.chdir(os.path.abspath(os.path.dirname(__file__))) ++ self.original = Path('.') ++ chdir(str(Path(__file__).parent)) + return self + + def __exit__(self, *args): +- os.chdir(self.original) ++ chdir(str(self.original)) + + +-GENERATOR_DIR = 'telethon_generator' +-LIBRARY_DIR = 'telethon' ++GENERATOR_DIR = Path('telethon_generator') ++LIBRARY_DIR = 
Path('telethon') + +-ERRORS_IN_JSON = os.path.join(GENERATOR_DIR, 'data', 'errors.json') +-ERRORS_IN_DESC = os.path.join(GENERATOR_DIR, 'data', 'error_descriptions') +-ERRORS_OUT = os.path.join(LIBRARY_DIR, 'errors', 'rpcerrorlist.py') ++ERRORS_IN_JSON = GENERATOR_DIR / 'data/errors.json' ++ERRORS_IN_DESC = GENERATOR_DIR / 'data/error_descriptions' ++ERRORS_OUT = LIBRARY_DIR / 'errors/rpcerrorlist.py' + +-INVALID_BM_IN = os.path.join(GENERATOR_DIR, 'data', 'invalid_bot_methods.json') ++INVALID_BM_IN = GENERATOR_DIR / 'data/invalid_bot_methods.json' + +-TLOBJECT_IN_CORE_TL = os.path.join(GENERATOR_DIR, 'data', 'mtproto_api.tl') +-TLOBJECT_IN_TL = os.path.join(GENERATOR_DIR, 'data', 'telegram_api.tl') +-TLOBJECT_OUT = os.path.join(LIBRARY_DIR, 'tl') ++TLOBJECT_IN_TLS = [Path(x) for x in GENERATOR_DIR.glob('data/*.tl')] ++TLOBJECT_OUT = LIBRARY_DIR / 'tl' + IMPORT_DEPTH = 2 + +-DOCS_IN_RES = os.path.join(GENERATOR_DIR, 'data', 'html') +-DOCS_OUT = 'docs' ++DOCS_IN_RES = GENERATOR_DIR / 'data/html' ++DOCS_OUT = Path('docs') + + + def generate(which): +@@ -60,15 +60,12 @@ + from telethon_generator.generators import\ + generate_errors, generate_tlobjects, generate_docs, clean_tlobjects + +- # Older Python versions open the file as bytes instead (3.4.2) +- with open(INVALID_BM_IN, 'r') as f: ++ with INVALID_BM_IN.open('r') as f: + invalid_bot_methods = set(json.load(f)) +- +- layer = find_layer(TLOBJECT_IN_TL) ++ layer = next(filter(None, map(find_layer, TLOBJECT_IN_TLS))) + errors = list(parse_errors(ERRORS_IN_JSON, ERRORS_IN_DESC)) +- tlobjects = list(itertools.chain( +- parse_tl(TLOBJECT_IN_CORE_TL, layer, invalid_bot_methods), +- parse_tl(TLOBJECT_IN_TL, layer, invalid_bot_methods))) ++ tlobjects = list(itertools.chain(*( ++ parse_tl(file, layer, invalid_bot_methods) for file in TLOBJECT_IN_TLS))) + + if not which: + which.extend(('tl', 'errors')) +@@ -96,30 +93,29 @@ + which.remove('errors') + print(action, 'RPCErrors...') + if clean: +- if os.path.isfile(ERRORS_OUT): +- os.remove(ERRORS_OUT) ++ if ERRORS_OUT.is_file(): ++ ERRORS_OUT.unlink() + else: +- with open(ERRORS_OUT, 'w', encoding='utf-8') as file: ++ with ERRORS_OUT.open('w') as file: + generate_errors(errors, file) + + if 'docs' in which: + which.remove('docs') + print(action, 'documentation...') + if clean: +- if os.path.isdir(DOCS_OUT): +- shutil.rmtree(DOCS_OUT) ++ if DOCS_OUT.is_dir(): ++ shutil.rmtree(str(DOCS_OUT)) + else: + generate_docs(tlobjects, methods, layer, DOCS_IN_RES, DOCS_OUT) + + if 'json' in which: + which.remove('json') + print(action, 'JSON schema...') +- mtproto = 'mtproto_api.json' +- telegram = 'telegram_api.json' ++ json_files = [x.with_suffix('.json') for x in TLOBJECT_IN_TLS] + if clean: +- for x in (mtproto, telegram): +- if os.path.isfile(x): +- os.remove(x) ++ for file in json_files: ++ if file.is_file(): ++ file.unlink() + else: + def gen_json(fin, fout): + methods = [] +@@ -131,8 +130,8 @@ + with open(fout, 'w') as f: + json.dump(what, f, indent=2) + +- gen_json(TLOBJECT_IN_CORE_TL, mtproto) +- gen_json(TLOBJECT_IN_TL, telegram) ++ for fin, fout in zip(TLOBJECT_IN_TLS, json_files): ++ gen_json(fin, fout) + + if which: + print('The following items were not understood:', which) +@@ -156,22 +155,17 @@ + print('Packaging for PyPi aborted, importing the module failed.') + return + +- # Need python3.5 or higher, but Telethon is supposed to support 3.x +- # Place it here since noone should be running ./setup.py pypi anyway +- from subprocess import run +- from shutil import rmtree +- + for x in ('build', 
'dist', 'Telethon.egg-info'): +- rmtree(x, ignore_errors=True) ++ shutil.rmtree(x, ignore_errors=True) + run('python3 setup.py sdist', shell=True) + run('python3 setup.py bdist_wheel', shell=True) + run('twine upload dist/*', shell=True) + for x in ('build', 'dist', 'Telethon.egg-info'): +- rmtree(x, ignore_errors=True) ++ shutil.rmtree(x, ignore_errors=True) + + else: + # e.g. install from GitHub +- if os.path.isdir(GENERATOR_DIR): ++ if GENERATOR_DIR.is_dir(): + generate(['tl', 'errors']) + + # Get the long description from the README file +--- a/telethon_generator/docswriter.py ++++ b/telethon_generator/docswriter.py +@@ -2,0 +2,0 @@ + + + class DocsWriter: +- """Utility class used to write the HTML files used on the documentation""" +- def __init__(self, filename, type_to_path): +- """Initializes the writer to the specified output file, +- creating the parent directories when used if required. +- +- 'type_to_path_function' should be a function which, given a type +- name and a named argument relative_to, returns the file path for +- the specified type, relative to the given filename ++ """ ++ Utility class used to write the HTML files used on the documentation. ++ """ ++ def __init__(self, root, filename, type_to_path): + """ ++ Initializes the writer to the specified output file, ++ creating the parent directories when used if required. ++ """ ++ self.root = root + self.filename = filename ++ self._parent = str(self.filename.parent) + self.handle = None ++ self.title = '' + + # Should be set before calling adding items to the menu + self.menu_separator_tag = None + +- # Utility functions TODO There must be a better way +- self.type_to_path = lambda t: type_to_path( +- t, relative_to=self.filename +- ) ++ # Utility functions ++ self.type_to_path = lambda t: self._rel(type_to_path(t)) + + # Control signals + self.menu_began = False +@@ -30,11 +30,20 @@ + self.write_copy_script = False + self._script = '' + ++ def _rel(self, path): ++ """ ++ Get the relative path for the given path from the current ++ file by working around https://bugs.python.org/issue20012. ++ """ ++ return os.path.relpath(str(path), self._parent) ++ + # High level writing +- def write_head(self, title, relative_css_path, default_css): ++ def write_head(self, title, css_path, default_css): + """Writes the head part for the generated document, + with the given title and CSS + """ ++ # ++ self.title = title + self.write( + ''' + +@@ -54,17 +63,17 @@ + +
''', + title=title, +- rel_css=relative_css_path.rstrip('/'), ++ rel_css=self._rel(css_path), + def_css=default_css + ) + +- def set_menu_separator(self, relative_image_path): ++ def set_menu_separator(self, img): + """Sets the menu separator. + Must be called before adding entries to the menu + """ +- if relative_image_path: +- self.menu_separator_tag = \ +- '/'.format(relative_image_path) ++ if img: ++ self.menu_separator_tag = '/'.format( ++ self._rel(img)) + else: + self.menu_separator_tag = None + +@@ -80,7 +89,7 @@ + + self.write('
  • ') + if link: +- self.write('', link) ++ self.write('', self._rel(link)) + + # Write the real menu entry text + self.write(name) +@@ -210,7 +219,7 @@ + if bold: + self.write('') + if link: +- self.write('', link) ++ self.write('', self._rel(link)) + + # Finally write the real table data, the given text + self.write(text) +@@ -278,10 +287,7 @@ + # With block + def __enter__(self): + # Sanity check +- parent = os.path.dirname(self.filename) +- if parent: +- os.makedirs(parent, exist_ok=True) +- ++ self.filename.parent.mkdir(parents=True, exist_ok=True) + self.handle = open(self.filename, 'w', encoding='utf-8') + return self + +--- a/telethon_generator/generators/docs.py ++++ b/telethon_generator/generators/docs.py +@@ -1,7 +1,6 @@ + #!/usr/bin/env python3 +-import csv + import functools +-import os + import re + import shutil + from collections import defaultdict ++from pathlib import Path + + from ..docswriter import DocsWriter + from ..parsers import TLObject, Usability +@@ -35,41 +34,33 @@ + + def _get_create_path_for(root, tlobject, make=True): + """Creates and returns the path for the given TLObject at root.""" +- out_dir = 'methods' if tlobject.is_function else 'constructors' ++ # TODO Can we pre-create all required directories? ++ out_dir = root / ('methods' if tlobject.is_function else 'constructors') + if tlobject.namespace: +- out_dir = os.path.join(out_dir, tlobject.namespace) ++ out_dir /= tlobject.namespace + +- out_dir = os.path.join(root, out_dir) + if make: +- os.makedirs(out_dir, exist_ok=True) +- return os.path.join(out_dir, _get_file_name(tlobject)) ++ out_dir.mkdir(parents=True, exist_ok=True) + ++ return out_dir / _get_file_name(tlobject) + +-def _get_path_for_type(root, type_, relative_to='.'): ++ ++def _get_path_for_type(type_): + """Similar to `_get_create_path_for` but for only type names.""" + if type_.lower() in CORE_TYPES: +- path = 'index.html#%s' % type_.lower() ++ return Path('index.html#%s' % type_.lower()) + elif '.' in type_: + namespace, name = type_.split('.') +- path = 'types/%s/%s' % (namespace, _get_file_name(name)) ++ return Path('types', namespace, _get_file_name(name)) + else: +- path = 'types/%s' % _get_file_name(type_) +- +- return _get_relative_path(os.path.join(root, path), relative_to) +- +- +-def _get_relative_path(destination, relative_to, folder=False): +- """Return the relative path to destination from relative_to.""" +- if not folder: +- relative_to = os.path.dirname(relative_to) +- +- return os.path.relpath(destination, start=relative_to) ++ return Path('types', _get_file_name(type_)) + + + def _find_title(html_file): + """Finds the for the given HTML file, or (Unknown).""" +- with open(html_file, 'r') as fp: +- for line in fp: ++ # TODO Is it necessary to read files like this? 
++ with html_file.open() as f: ++ for line in f: + if '<title>' in line: + # + 7 to skip len('<title>') + return line[line.index('<title>') + 7:line.index('')] +@@ -77,25 +68,27 @@ + return '(Unknown)' + + +-def _build_menu(docs, filename, root, relative_main_index): +- """Builds the menu using the given DocumentWriter up to 'filename', +- which must be a file (it cannot be a directory)""" +- filename = _get_relative_path(filename, root) +- docs.add_menu('API', relative_main_index) +- +- items = filename.split('/') +- for i in range(len(items) - 1): +- item = items[i] +- link = '../' * (len(items) - (i + 2)) +- link += 'index.html' +- docs.add_menu(item.title(), link=link) ++def _build_menu(docs): ++ """ ++ Builds the menu used for the current ``DocumentWriter``. ++ """ ++ ++ paths = [] ++ current = docs.filename ++ while current != docs.root: ++ current = current.parent ++ paths.append(current) ++ ++ for path in reversed(paths): ++ docs.add_menu(path.stem.title(), link=path / 'index.html') ++ ++ if docs.filename.stem != 'index': ++ docs.add_menu(docs.title, link=docs.filename) + +- if items[-1] != 'index.html': +- docs.add_menu(os.path.splitext(items[-1])[0]) + docs.end_menu() + + +-def _generate_index(folder, original_paths, root, ++def _generate_index(root, folder, paths, + bots_index=False, bots_index_paths=()): + """Generates the index file for the specified folder""" + # Determine the namespaces listed here (as sub folders) +@@ -105,38 +98,24 @@ + INDEX = 'index.html' + BOT_INDEX = 'botindex.html' + +- if not bots_index: +- for item in os.listdir(folder): +- if os.path.isdir(os.path.join(folder, item)): +- namespaces.append(item) +- elif item not in (INDEX, BOT_INDEX): +- files.append(item) +- else: +- # bots_index_paths should be a list of "namespace/method.html" +- # or "method.html" +- for item in bots_index_paths: +- dirname = os.path.dirname(item) +- if dirname and dirname not in namespaces: +- namespaces.append(dirname) +- elif not dirname and item not in (INDEX, BOT_INDEX): +- files.append(item) +- +- paths = {k: _get_relative_path(v, folder, folder=True) +- for k, v in original_paths.items()} ++ for item in (bots_index_paths or folder.iterdir()): ++ if item.is_dir(): ++ namespaces.append(item) ++ elif item.name not in (INDEX, BOT_INDEX): ++ files.append(item) + + # Now that everything is setup, write the index.html file +- filename = os.path.join(folder, BOT_INDEX if bots_index else INDEX) +- with DocsWriter(filename, type_to_path=_get_path_for_type) as docs: ++ filename = folder / (BOT_INDEX if bots_index else INDEX) ++ with DocsWriter(root, filename, _get_path_for_type) as docs: + # Title should be the current folder name +- docs.write_head(folder.title(), +- relative_css_path=paths['css'], +- default_css=original_paths['default_css']) ++ docs.write_head(str(folder).title(), ++ css_path=paths['css'], ++ default_css=paths['default_css']) + + docs.set_menu_separator(paths['arrow']) +- _build_menu(docs, filename, root, +- relative_main_index=paths['index_all']) ++ _build_menu(docs) ++ docs.write_title(str(filename.parent.relative_to(root)).title()) + +- docs.write_title(_get_relative_path(folder, root, folder=True).title()) + if bots_index: + docs.write_text('These are the methods that you may be able to ' + 'use as a bot. 
Click here to ' +@@ -153,24 +132,22 @@ + namespace_paths = [] + if bots_index: + for item in bots_index_paths: +- if os.path.dirname(item) == namespace: +- namespace_paths.append(os.path.basename(item)) +- _generate_index(os.path.join(folder, namespace), +- original_paths, root, ++ if item.parent == namespace: ++ namespace_paths.append(item) ++ ++ _generate_index(root, namespace, paths, + bots_index, namespace_paths) +- if bots_index: +- docs.add_row(namespace.title(), +- link=os.path.join(namespace, BOT_INDEX)) +- else: +- docs.add_row(namespace.title(), +- link=os.path.join(namespace, INDEX)) ++ ++ docs.add_row( ++ namespace.stem.title(), ++ link=namespace / (BOT_INDEX if bots_index else INDEX)) + + docs.end_table() + + docs.write_title('Available items') + docs.begin_table(2) + +- files = [(f, _find_title(os.path.join(folder, f))) for f in files] ++ files = [(f, _find_title(f)) for f in files] + files.sort(key=lambda t: t[1]) + + for file, title in files: +@@ -231,7 +208,7 @@ + )) + + +-def _write_html_pages(tlobjects, methods, layer, input_res, output_dir): ++def _write_html_pages(root, tlobjects, methods, layer, input_res): + """ + Generates the documentation HTML files from from ``scheme.tl`` + to ``/methods`` and ``/constructors``, etc. +@@ -239,21 +216,18 @@ + # Save 'Type: [Constructors]' for use in both: + # * Seeing the return type or constructors belonging to the same type. + # * Generating the types documentation, showing available constructors. +- original_paths = { +- 'css': 'css', +- 'arrow': 'img/arrow.svg', +- 'search.js': 'js/search.js', +- '404': '404.html', +- 'index_all': 'index.html', +- 'bot_index': 'botindex.html', +- 'index_types': 'types/index.html', +- 'index_methods': 'methods/index.html', +- 'index_constructors': 'constructors/index.html' +- } +- original_paths = {k: os.path.join(output_dir, v) +- for k, v in original_paths.items()} +- +- original_paths['default_css'] = 'light' # docs..css, local path ++ paths = {k: root / v for k, v in ( ++ ('css', 'css'), ++ ('arrow', 'img/arrow.svg'), ++ ('search.js', 'js/search.js'), ++ ('404', '404.html'), ++ ('index_all', 'index.html'), ++ ('bot_index', 'botindex.html'), ++ ('index_types', 'types/index.html'), ++ ('index_methods', 'methods/index.html'), ++ ('index_constructors', 'constructors/index.html') ++ )} ++ paths['default_css'] = 'light' # docs..css, local path + type_to_constructors = defaultdict(list) + type_to_functions = defaultdict(list) + for tlobject in tlobjects: +@@ -266,24 +240,20 @@ + methods = {m.name: m for m in methods} + + # Since the output directory is needed everywhere partially apply it now +- create_path_for = functools.partial(_get_create_path_for, output_dir) +- path_for_type = functools.partial(_get_path_for_type, output_dir) ++ create_path_for = functools.partial(_get_create_path_for, root) ++ path_for_type = lambda t: root / _get_path_for_type(t) + bot_docs_paths = [] + + for tlobject in tlobjects: + filename = create_path_for(tlobject) +- paths = {k: _get_relative_path(v, filename) +- for k, v in original_paths.items()} +- +- with DocsWriter(filename, type_to_path=path_for_type) as docs: ++ with DocsWriter(root, filename, path_for_type) as docs: + docs.write_head(title=tlobject.class_name, +- relative_css_path=paths['css'], +- default_css=original_paths['default_css']) ++ css_path=paths['css'], ++ default_css=paths['default_css']) + + # Create the menu (path to the current TLObject) + docs.set_menu_separator(paths['arrow']) +- _build_menu(docs, filename, output_dir, +- 
relative_main_index=paths['index_all']) ++ _build_menu(docs) + + # Create the page title + docs.write_title(tlobject.class_name) +@@ -333,9 +303,7 @@ + inner = tlobject.result + + docs.begin_table(column_count=1) +- docs.add_row(inner, link=path_for_type( +- inner, relative_to=filename +- )) ++ docs.add_row(inner, link=path_for_type(inner)) + docs.end_table() + + cs = type_to_constructors.get(inner, []) +@@ -349,7 +317,6 @@ + docs.begin_table(column_count=2) + for constructor in cs: + link = create_path_for(constructor) +- link = _get_relative_path(link, relative_to=filename) + docs.add_row(constructor.class_name, link=link) + docs.end_table() + +@@ -380,8 +347,8 @@ + docs.add_row('!' + friendly_type, align='center') + else: + docs.add_row( +- friendly_type, align='center', link= +- path_for_type(arg.type, relative_to=filename) ++ friendly_type, align='center', ++ link=path_for_type(arg.type) + ) + + # Add a description for this argument +@@ -441,18 +408,13 @@ + docs.add_script(relative_src=paths['search.js']) + docs.end_body() + +- temp = [] +- for item in bot_docs_paths: +- temp.append(os.path.sep.join(item.split(os.path.sep)[2:])) +- bot_docs_paths = temp +- + # Find all the available types (which are not the same as the constructors) + # Each type has a list of constructors associated to it, hence is a map + for t, cs in type_to_constructors.items(): + filename = path_for_type(t) +- out_dir = os.path.dirname(filename) ++ out_dir = filename.parent + if out_dir: +- os.makedirs(out_dir, exist_ok=True) ++ out_dir.mkdir(parents=True, exist_ok=True) + + # Since we don't have access to the full TLObject, split the type + if '.' in t: +@@ -460,17 +422,13 @@ + else: + namespace, name = None, t + +- paths = {k: _get_relative_path(v, out_dir, folder=True) +- for k, v in original_paths.items()} +- +- with DocsWriter(filename, type_to_path=path_for_type) as docs: ++ with DocsWriter(root, filename, path_for_type) as docs: + docs.write_head(title=snake_to_camel_case(name), +- relative_css_path=paths['css'], +- default_css=original_paths['default_css']) ++ css_path=paths['css'], ++ default_css=paths['default_css']) + + docs.set_menu_separator(paths['arrow']) +- _build_menu(docs, filename, output_dir, +- relative_main_index=paths['index_all']) ++ _build_menu(docs) + + # Main file title + docs.write_title(snake_to_camel_case(name)) +@@ -489,7 +447,6 @@ + for constructor in cs: + # Constructor full name + link = create_path_for(constructor) +- link = _get_relative_path(link, relative_to=filename) + docs.add_row(constructor.class_name, link=link) + docs.end_table() + +@@ -509,7 +466,6 @@ + docs.begin_table(2) + for func in functions: + link = create_path_for(func) +- link = _get_relative_path(link, relative_to=filename) + docs.add_row(func.class_name, link=link) + docs.end_table() + +@@ -534,7 +490,6 @@ + docs.begin_table(2) + for ot in other_methods: + link = create_path_for(ot) +- link = _get_relative_path(link, relative_to=filename) + docs.add_row(ot.class_name, link=link) + docs.end_table() + +@@ -560,7 +515,6 @@ + docs.begin_table(2) + for ot in other_types: + link = create_path_for(ot) +- link = _get_relative_path(link, relative_to=filename) + docs.add_row(ot.class_name, link=link) + docs.end_table() + docs.end_body() +@@ -570,11 +524,10 @@ + # information that we have available, simply a file listing all the others + # accessible by clicking on their title + for folder in ['types', 'methods', 'constructors']: +- _generate_index(os.path.join(output_dir, folder), original_paths, +- output_dir) ++ 
_generate_index(root, root / folder, paths) + +- _generate_index(os.path.join(output_dir, 'methods'), original_paths, +- output_dir, True, bot_docs_paths) ++ _generate_index(root, root / 'methods', paths, True, ++ bot_docs_paths) + + # Write the final core index, the main index for the rest of files + types = set() +@@ -596,9 +549,8 @@ + methods = sorted(methods, key=lambda m: m.name) + cs = sorted(cs, key=lambda c: c.name) + +- shutil.copy(os.path.join(input_res, '404.html'), original_paths['404']) +- _copy_replace(os.path.join(input_res, 'core.html'), +- original_paths['index_all'], { ++ shutil.copy(str(input_res / '404.html'), str(paths['404'])) ++ _copy_replace(input_res / 'core.html', paths['index_all'], { + '{type_count}': len(types), + '{method_count}': len(methods), + '{constructor_count}': len(tlobjects) - len(methods), +@@ -624,17 +576,15 @@ + type_names = fmt(types, formatter=lambda x: x) + + # Local URLs shouldn't rely on the output's root, so set empty root +- create_path_for = functools.partial(_get_create_path_for, '', make=False) +- path_for_type = functools.partial(_get_path_for_type, '') ++ create_path_for = functools.partial( ++ _get_create_path_for, Path(), make=False) ++ + request_urls = fmt(methods, create_path_for) +- type_urls = fmt(types, path_for_type) ++ type_urls = fmt(types, _get_path_for_type) + constructor_urls = fmt(cs, create_path_for) + +- os.makedirs(os.path.abspath(os.path.join( +- original_paths['search.js'], os.path.pardir +- )), exist_ok=True) +- _copy_replace(os.path.join(input_res, 'js', 'search.js'), +- original_paths['search.js'], { ++ paths['search.js'].parent.mkdir(parents=True, exist_ok=True) ++ _copy_replace(input_res / 'js/search.js', paths['search.js'], { + '{request_names}': request_names, + '{type_names}': type_names, + '{constructor_names}': constructor_names, +@@ -649,11 +599,11 @@ + ('img', ['arrow.svg'])]: +- dirpath = os.path.join(out_dir, dirname) +- os.makedirs(dirpath, exist_ok=True) ++ dirpath = out_dir / dirname ++ dirpath.mkdir(parents=True, exist_ok=True) + for file in files: +- shutil.copy(os.path.join(res_dir, dirname, file), dirpath) ++ shutil.copy(str(res_dir / dirname / file), str(dirpath)) + + + def generate_docs(tlobjects, methods, layer, input_res, output_dir): +- os.makedirs(output_dir, exist_ok=True) +- _write_html_pages(tlobjects, methods, layer, input_res, output_dir) ++ output_dir.mkdir(parents=True, exist_ok=True) ++ _write_html_pages(output_dir, tlobjects, methods, layer, input_res) + _copy_resources(input_res, output_dir) +--- a/telethon_generator/generators/tlobject.py ++++ b/telethon_generator/generators/tlobject.py +@@ -48,9 +48,8 @@ + def _write_modules( + out_dir, depth, kind, namespace_tlobjects, type_constructors): + # namespace_tlobjects: {'namespace', [TLObject]} +- os.makedirs(out_dir, exist_ok=True) ++ out_dir.mkdir(parents=True, exist_ok=True) + for ns, tlobjects in namespace_tlobjects.items(): +- file = os.path.join(out_dir, '{}.py'.format(ns or '__init__')) +- with open(file, 'w', encoding='utf-8') as f,\ +- SourceBuilder(f) as builder: ++ file = out_dir / '{}.py'.format(ns or '__init__') ++ with file.open('w') as f, SourceBuilder(f) as builder: + builder.writeln(AUTO_GEN_NOTICE) + + builder.writeln('from {}.tl.tlobject import TLObject', '.' 
* depth) +@@ -635,11 +634,10 @@ + + + def _write_patched(out_dir, namespace_tlobjects): +- os.makedirs(out_dir, exist_ok=True) ++ out_dir.mkdir(parents=True, exist_ok=True) + for ns, tlobjects in namespace_tlobjects.items(): +- file = os.path.join(out_dir, '{}.py'.format(ns or '__init__')) +- with open(file, 'w', encoding='utf-8') as f,\ +- SourceBuilder(f) as builder: ++ file = out_dir / '{}.py'.format(ns or '__init__') ++ with file.open('w') as f, SourceBuilder(f) as builder: + builder.writeln(AUTO_GEN_NOTICE) + + builder.writeln('import struct') +@@ -715,26 +713,24 @@ + if tlobject.fullname in PATCHED_TYPES: + namespace_patched[tlobject.namespace].append(tlobject) + +- get_file = functools.partial(os.path.join, output_dir) +- _write_modules(get_file('functions'), import_depth, 'TLRequest', ++ _write_modules(output_dir / 'functions', import_depth, 'TLRequest', + namespace_functions, type_constructors) +- _write_modules(get_file('types'), import_depth, 'TLObject', ++ _write_modules(output_dir / 'types', import_depth, 'TLObject', + namespace_types, type_constructors) +- _write_patched(get_file('patched'), namespace_patched) ++ _write_patched(output_dir / 'patched', namespace_patched) + +- filename = os.path.join(get_file('alltlobjects.py')) +- with open(filename, 'w', encoding='utf-8') as file: ++ filename = output_dir / 'alltlobjects.py' ++ with filename.open('w') as file: + with SourceBuilder(file) as builder: + _write_all_tlobjects(tlobjects, layer, builder) + + + def clean_tlobjects(output_dir): +- get_file = functools.partial(os.path.join, output_dir) + for d in ('functions', 'types'): +- d = get_file(d) +- if os.path.isdir(d): +- shutil.rmtree(d) ++ d = output_dir / d ++ if d.is_dir(): ++ shutil.rmtree(str(d)) + +- tl = get_file('alltlobjects.py') +- if os.path.isfile(tl): +- os.remove(tl) ++ tl = output_dir / 'alltlobjects.py' ++ if tl.is_file(): ++ tl.unlink() +--- a/telethon_generator/parsers/errors.py ++++ b/telethon_generator/parsers/errors.py +@@ -57,7 +57,7 @@ + Parses the input CSV file with columns (name, error codes, description) + and yields `Error` instances as a result. + """ +- with open(csv_file, newline='') as f: ++ with csv_file.open(newline='') as f: + f = csv.reader(f) + next(f, None) # header + for line, (name, codes, description) in enumerate(f, start=2): +--- a/telethon_generator/parsers/methods.py ++++ b/telethon_generator/parsers/methods.py +@@ -30,7 +30,7 @@ + Parses the input CSV file with columns (method, usability, errors) + and yields `MethodInfo` instances as a result. + """ +- with open(csv_file, newline='') as f: ++ with csv_file.open(newline='') as f: + f = csv.reader(f) + next(f, None) # header + for line, (method, usability, errors) in enumerate(f, start=2): +--- a/telethon_generator/parsers/tlobject/parser.py ++++ b/telethon_generator/parsers/tlobject/parser.py +@@ -86,7 +86,7 @@ + obj_all = [] + obj_by_name = {} + obj_by_type = collections.defaultdict(list) +- with open(file_path, 'r', encoding='utf-8') as file: ++ with file_path.open() as file: + is_function = False + for line in file: + comment_index = line.find('//')