Initial import of Brookhaven site
This commit is contained in:
7
.gitignore
vendored
Normal file
@@ -0,0 +1,7 @@
.env
*.pyc
__pycache__/
venv/
*.log
cache/
instance/
336
app.py
Normal file
@@ -0,0 +1,336 @@
# app.py
from __future__ import annotations
import os, json
from datetime import datetime
from typing import Dict, Any
from urllib.parse import urlencode, urlsplit, urlunsplit, parse_qsl
from flask import Flask, render_template, request, redirect, url_for, jsonify, flash, Response
from sqlalchemy import create_engine, text, Column, Integer, String, DateTime, JSON
from sqlalchemy.orm import declarative_base, sessionmaker, scoped_session
from sqlalchemy.exc import SQLAlchemyError
from functools import wraps
import hmac
from werkzeug.security import check_password_hash

# -----------------------------------------------------------------------------
# App
# -----------------------------------------------------------------------------
app = Flask(__name__, static_folder="static", static_url_path="/static")

BRAND = "BrookHaven Technologies"
TAGLINE = "Fast to prototype. Safe to scale."


# --- Personal contact (override via env) ---
CONTACT = {
    "name": os.environ.get("BH_CONTACT_NAME", "Benjamin Mosley"),
    "title": os.environ.get("BH_CONTACT_TITLE", "Founder, BrookHaven Technologies"),
    "email": os.environ.get("BH_CONTACT_EMAIL", "ben@bennyshouse.net"),
    "phone": os.environ.get("BH_CONTACT_PHONE", "(806) 655 2300"),
    "city": os.environ.get("BH_CONTACT_CITY", "Canyon / Amarillo / Borger / Remote"),
    "cal": os.environ.get("BH_CONTACT_CAL", "https://calendly.com/bennyshouse24/30min"),
    "link": os.environ.get("BH_CONTACT_LINK", "https://www.linkedin.com/in/benjamin-mosley-849643329/"),
    "site": os.environ.get("BH_CONTACT_SITE", "https://bennyshouse.net"),
    "hours": os.environ.get("BH_CONTACT_HOURS", "Mon–Fri, 9a–5p CT"),
}


app.config.update(
    SECRET_KEY=os.environ.get("APP_SECRET_KEY", "dev"),
    SESSION_COOKIE_HTTPONLY=True,
    SESSION_COOKIE_SAMESITE="Lax",
    SESSION_COOKIE_SECURE=bool(int(os.environ.get("COOKIE_SECURE", "0"))),  # set COOKIE_SECURE=1 in prod with HTTPS
)

# Admin credentials (env-driven)
ADMIN_USER = os.environ.get("BH_ADMIN_USER", "admin")
ADMIN_PW_HASH = os.environ.get("BH_ADMIN_PASSWORD_HASH", "32768:8:1$pgll8a2zdtxky50G$8ef13bb775569f480da14618433b7b80a93f5cb3ef99b67878ddfb058d39e858f05d81b25c88365737d81400ee287a156c76de7b51aed33ea667030f7a83e10d")  # pbkdf2 hash
ADMIN_BEARER = os.environ.get("BH_ADMIN_BEARER", "")  # optional static token


# -----------------------------------------------------------------------------
# DB (MariaDB)
# -----------------------------------------------------------------------------
DB_URL = os.environ.get("DB_URL", "mysql+pymysql://tapdown:Swaows.1234@127.0.0.1/tapdown")

engine = create_engine(
    DB_URL,
    pool_size=10,
    max_overflow=20,
    pool_recycle=1800,
    pool_pre_ping=True,
    isolation_level="READ COMMITTED",
    future=True,
)
SessionLocal = scoped_session(sessionmaker(bind=engine, expire_on_commit=False, future=True))
Base = declarative_base()


class Inquiry(Base):
    __tablename__ = "bh_inquiries"
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(160), nullable=False)
    email = Column(String(200), nullable=False)
    message = Column(String(4000), nullable=False)
    nda = Column(String(8), nullable=False, default="no")  # yes|no
    meta = Column(JSON, nullable=False, default={})  # e.g. user agent
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)


_tables_ready = False

@app.before_request
def ensure_tables():
    global _tables_ready
    if _tables_ready:
        return
    try:
        with engine.begin() as conn:
            Base.metadata.create_all(conn)
        _tables_ready = True
    except SQLAlchemyError:
        app.logger.exception("DB init failed; continuing without DB")


@app.teardown_appcontext
def remove_session(_=None):
    SessionLocal.remove()


# -----------------------------------------------------------------------------
# Helpers
# -----------------------------------------------------------------------------
def with_utm(url: str, extra: Dict[str, str] | None = None) -> str:
    scheme, netloc, path, query, frag = urlsplit(url)
    q = dict(parse_qsl(query))
    q.update(extra or {})
    return urlunsplit((scheme, netloc, path, urlencode(q), frag))


def _is_admin_request():
    # 1) Bearer token (e.g., for automation or CSV curl)
    authz = request.headers.get("Authorization", "")
    if ADMIN_BEARER and authz.startswith("Bearer "):
        token = authz[7:].strip()
        if hmac.compare_digest(token, ADMIN_BEARER):
            return True

    # 2) HTTP Basic for humans
    auth = request.authorization
    if auth and ADMIN_PW_HASH and auth.username == ADMIN_USER:
        try:
            if check_password_hash(ADMIN_PW_HASH, auth.password):
                return True
        except Exception:
            pass
    return False


def require_admin(view):
    @wraps(view)
    def _wrapped(*args, **kwargs):
        if _is_admin_request():
            return view(*args, **kwargs)
        return Response(
            "Authentication required",
            401,
            {"WWW-Authenticate": 'Basic realm="BrookHaven Admin"'},
        )
    return _wrapped


# -----------------------------------------------------------------------------
# Routes (pages)
# -----------------------------------------------------------------------------
@app.get("/")
def home():
    return render_template("index.html", brand=BRAND, tagline=TAGLINE)


@app.get("/about")
def about():
    return render_template("about.html", brand=BRAND)


@app.get("/services")
def services():
    return render_template("services.html", brand=BRAND)


@app.get("/work")
def work():
    # Example case studies (could be a JSON file later)
    cases = [
        {
            "title": "Tapdown Showdown — Cyber Sale Activation",
            "desc": "Survey-powered mini-game; branded UI, MariaDB analytics, CPU mode for flaky networks.",
            "bullets": ["90%+ survey completion", "UTM funnels to promo pages", "Works offline/kiosk"],
            "image": "/static/brookhaven-case.jpg",
        },
        {
            "title": "Kiosk Checkout Prototype",
            "desc": "Self-serve event checkout with QR receipts and local-first sync.",
            "bullets": ["Local cache", "Queue-busting UX", "Auto export"],
            "image": "/static/brookhaven-kiosk.jpg",
        },
    ]
    return render_template("work.html", brand=BRAND, cases=cases)


@app.context_processor
def inject_contact():
    return {"CONTACT": CONTACT}


@app.get("/contact.vcf")
def contact_vcf():
    # Generate a simple vCard 3.0
    n = CONTACT["name"]
    parts = n.split(" ", 1)
    last = parts[-1] if len(parts) > 1 else parts[0]
    first = parts[0]
    phone = CONTACT["phone"].replace(" ", "")
    email = CONTACT["email"]
    org = BRAND
    title = CONTACT["title"]
    url = CONTACT["site"]
    city = CONTACT["city"]

    vcard = f"""BEGIN:VCARD
VERSION:3.0
N:{last};{first};;;
FN:{n}
ORG:{org}
TITLE:{title}
TEL;TYPE=CELL,VOICE:{phone}
EMAIL;TYPE=INTERNET:{email}
URL:{url}
ADR;TYPE=WORK:;;{city};;;;
END:VCARD
"""
    return (vcard, 200, {
        "Content-Type": "text/vcard; charset=utf-8",
        "Content-Disposition": 'attachment; filename="brookhaven-contact.vcf"',
    })


@app.get("/contact")
def contact():
    return render_template("contact.html", brand=BRAND)


@app.post("/contact")
def contact_post():
    name = (request.form.get("name") or "").strip()
    email = (request.form.get("email") or "").strip()
    message = (request.form.get("message") or "").strip()
    nda = "yes" if request.form.get("nda") in ("on", "yes", "true") else "no"

    if not name or not email or not message:
        flash("Please fill name, email, and a short description.", "error")
        return redirect(url_for("contact"))

    # Persist (best-effort)
    meta = {"ua": request.headers.get("User-Agent", ""), "ip": request.remote_addr}
    db = SessionLocal()
    try:
        db.add(Inquiry(name=name, email=email, message=message, nda=nda, meta=meta))
        db.commit()
    except SQLAlchemyError:
        db.rollback()
        app.logger.exception("Failed to write inquiry")
        # keep going anyway to the thank-you page

    return redirect(url_for("thanks"))


@app.get("/thanks")
def thanks():
    return render_template("thanks.html", brand=BRAND)


# -----------------------------------------------------------------------------
# Admin (read-only)
# -----------------------------------------------------------------------------
@app.get("/admin/inquiries")
@require_admin
def admin_inquiries():
    page = max(1, int(request.args.get("page", 1)))
    per_page = 25
    offset = (page - 1) * per_page
    db = SessionLocal()
    try:
        rows = db.execute(
            text("""SELECT id,name,email,message,nda,meta,created_at
                    FROM bh_inquiries ORDER BY created_at DESC
                    LIMIT :limit OFFSET :offset"""),
            {"limit": per_page, "offset": offset}
        ).mappings().all()
        total = db.execute(text("SELECT COUNT(*) FROM bh_inquiries")).scalar_one()
    except Exception:
        app.logger.exception("Query failed")
        rows, total = [], 0
    finally:
        db.close()

    # Convert meta JSON (string from PyMySQL) to dict
    def parse_json(val):
        if isinstance(val, dict):
            return val
        if isinstance(val, str) and val:
            try:
                return json.loads(val)
            except Exception:
                return {}
        return {}

    processed = []
    for r in rows:
        processed.append({
            "id": r["id"], "name": r["name"], "email": r["email"],
            "message": r["message"], "nda": r["nda"],
            "meta": parse_json(r["meta"]),
            "created_at": r["created_at"],
        })
    pages = (total // per_page) + (1 if total % per_page else 0)
    return render_template("admin_inquiries.html",
                           rows=processed, page=page, pages=pages, total=total, brand=BRAND)


@app.get("/admin/inquiries.csv")
@require_admin
def admin_inquiries_csv():
    db = SessionLocal()
    try:
        rows = db.execute(
            text("""SELECT id,name,email,message,nda,meta,created_at
                    FROM bh_inquiries ORDER BY created_at DESC""")
        ).mappings().all()
    finally:
        db.close()

    import csv, io
    buf = io.StringIO()
    w = csv.writer(buf)
    w.writerow(["id","name","email","nda","message","ua","ip","created_at"])
    for r in rows:
        meta = r["meta"]
        if isinstance(meta, str):
            try:
                meta = json.loads(meta)
            except Exception:
                meta = {}
        w.writerow([
            r["id"], r["name"], r["email"], r["nda"],
            (r["message"] or "").replace("\n", " ").strip(),
            (meta or {}).get("ua", ""), (meta or {}).get("ip", ""),
            r["created_at"],
        ])
    out = buf.getvalue()
    return out, 200, {
        "Content-Type": "text/csv; charset=utf-8",
        "Content-Disposition": "attachment; filename=brookhaven_inquiries.csv"
    }


# -----------------------------------------------------------------------------
# Utilities
# -----------------------------------------------------------------------------
@app.get("/healthz")
def healthz():
    return jsonify({"ok": True, "brand": BRAND}), 200


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5050, debug=False)
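The admin routes in app.py authenticate HTTP Basic logins against BH_ADMIN_PASSWORD_HASH using Werkzeug's check_password_hash. A minimal sketch (not part of this commit) of producing a compatible hash to export before starting the app; the env variable name comes from app.py above, and the password shown is a placeholder:

```python
# Sketch: generate a hash that check_password_hash() in app.py will accept.
# Run once locally, then export the printed value as BH_ADMIN_PASSWORD_HASH.
from werkzeug.security import generate_password_hash, check_password_hash

password = "replace-with-a-strong-admin-password"  # placeholder, not a real credential
pw_hash = generate_password_hash(password, method="pbkdf2:sha256")

assert check_password_hash(pw_hash, password)  # the same check app.py performs
print(pw_hash)  # export BH_ADMIN_PASSWORD_HASH='<printed value>'
```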
253
history/main.html
Normal file
@@ -0,0 +1,253 @@
|
||||
<!doctype html>
|
||||
<html lang="en" data-theme="win95">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1" />
|
||||
<title>Indie Grid — Chunky Win95 / Aero</title>
|
||||
|
||||
<!-- Tailwind CDN (no build step) -->
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<script>
|
||||
tailwind.config = {
|
||||
theme: {
|
||||
extend: {
|
||||
fontFamily: { ui: ["Inter", "Tahoma", "ui-sans-serif", "system-ui"] },
|
||||
boxShadow: {
|
||||
aero: '0 20px 50px rgba(0,0,0,.25), 0 1px 0 rgba(255,255,255,.25) inset',
|
||||
glass: '0 1px 0 rgba(255,255,255,.6) inset, 0 8px 24px rgba(0,0,0,.12)'
|
||||
},
|
||||
borderRadius: {
|
||||
win: '6px',
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
/* THEME TOKENS (swap via data-theme) */
|
||||
:root {
|
||||
--bg: #e9eef5;
|
||||
--ink: #0f172a;
|
||||
--surface: rgba(255,255,255,0.65);
|
||||
--border: rgba(0,0,0,.16);
|
||||
--accent: #1e90ff;
|
||||
--glass: blur(12px);
|
||||
--titlebar-grad: linear-gradient(180deg, rgba(255,255,255,.85), rgba(255,255,255,.45));
|
||||
--meta: rgba(0,0,0,.6);
|
||||
}
|
||||
[data-theme="win95"] {
|
||||
--bg: #c0c0c0;
|
||||
--ink: #000;
|
||||
--surface: #dfdfdf;
|
||||
--border: #808080;
|
||||
--accent: #008080;
|
||||
--glass: blur(0px);
|
||||
--titlebar-grad: linear-gradient(180deg, #0a246a, #a6caf0);
|
||||
--meta: #000;
|
||||
}
|
||||
|
||||
/* Retro paper texture overlay (very subtle) */
|
||||
body {
|
||||
background:
|
||||
repeating-linear-gradient(90deg, rgba(255,255,255,.45) 0 1px, transparent 1px 3px),
|
||||
repeating-linear-gradient(0deg, rgba(255,255,255,.35) 0 1px, transparent 1px 3px),
|
||||
var(--bg);
|
||||
background-blend-mode: overlay;
|
||||
}
|
||||
|
||||
/* Win95 bevel helpers */
|
||||
.bevel { border-width: 6px; border-style: solid; border-top-color:#fff; border-left-color:#fff; border-bottom-color:#808080; border-right-color:#808080; }
|
||||
.bevel-inset { border-width: 6px; border-style: solid; border-top-color:#808080; border-left-color:#808080; border-bottom-color:#fff; border-right-color:#fff; }
|
||||
|
||||
/* Press/tactile card */
|
||||
.card-press { transition: transform .06s ease, filter .06s ease; }
|
||||
.card-press:active { transform: translateY(1px); }
|
||||
.card-press:hover { filter: brightness(1.02); }
|
||||
|
||||
/* Snap-open window animation */
|
||||
@keyframes pop95 { 0% { transform: scale(.96) translateY(8px); filter: brightness(.96); opacity:0; } 100% { transform: scale(1) translateY(0); filter:none; opacity:1; } }
|
||||
.win-enter { animation: pop95 .14s ease-out both; }
|
||||
|
||||
/* Pixel dotted focus */
|
||||
.focus-95:focus { outline: 1px dotted #000; outline-offset: 2px; }
|
||||
|
||||
/* CRT overlay toggle */
|
||||
.crt .crt-panel { position: relative; overflow: hidden; }
|
||||
.crt .crt-panel::before{
|
||||
content:""; position:absolute; inset:0; pointer-events:none;
|
||||
background:
|
||||
linear-gradient(transparent 50%, rgba(0,0,0,.06) 50%),
|
||||
radial-gradient(circle at 50% 50%, rgba(255,255,255,.12), transparent 60%);
|
||||
background-size: 100% 2px, 100% 100%;
|
||||
mix-blend-mode: multiply;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body class="min-h-screen font-ui text-[color:var(--ink)]">
|
||||
<!-- HEADER -->
|
||||
<header class="max-w-[1120px] mx-auto px-4 py-3">
|
||||
<div class="flex items-center justify-between gap-4">
|
||||
<a href="#" class="flex items-center gap-2 focus-95">
|
||||
<div class="w-8 h-8 bg-[color:var(--accent)] bevel"></div>
|
||||
<span class="font-extrabold tracking-wider uppercase">Indie Grid</span>
|
||||
</a>
|
||||
<div class="flex items-center gap-2">
|
||||
<button id="btnAero" class="focus-95 px-3 py-2 text-sm font-bold rounded-win border-2 border-[color:var(--border)] bg-white/80 hover:bg-white">Aero</button>
|
||||
<button id="btn95" class="focus-95 px-3 py-2 text-sm font-bold bevel bg-[color:var(--surface)]">Win95</button>
|
||||
<button id="btnCRT" class="focus-95 px-3 py-2 text-sm font-bold rounded-win border-2 border-[color:var(--border)] bg-white/80 hover:bg-white">CRT</button>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<!-- MAIN -->
|
||||
<main class="max-w-[1120px] mx-auto px-4">
|
||||
<!-- GRID -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-[2fr_1fr] gap-6">
|
||||
<!-- PRIMARY FEED -->
|
||||
<section class="space-y-6">
|
||||
<!-- Card 1: Aero-ish chrome, becomes 95 via tokens -->
|
||||
<article class="relative rounded-win border border-[color:var(--border)] backdrop-blur-[var(--glass)] bg-[color:var(--surface)] shadow-glass card-press win-enter crt-panel">
|
||||
<!-- Title bar -->
|
||||
<div class="flex items-center justify-between px-3 py-2 border-b border-[color:var(--border)]" style="background: var(--titlebar-grad); color: #0b1220;">
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="w-4 h-4 bg-[color:var(--accent)]"></div>
|
||||
<span class="font-bold tracking-wide uppercase text-xs">Gaming</span>
|
||||
</div>
|
||||
<div class="flex gap-1.5">
|
||||
<span class="w-3.5 h-3.5 rounded-full bg-[#ff5f56]"></span>
|
||||
<span class="w-3.5 h-3.5 rounded-full bg-[#f6c026]"></span>
|
||||
<span class="w-3.5 h-3.5 rounded-full bg-[#2ecc71]"></span>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Meta -->
|
||||
<div class="px-4 py-2 text-sm border-b border-[color:var(--border)]" style="color: var(--meta);">
|
||||
<time datetime="2020-09-01">9.1.2020</time>
|
||||
<span class="mx-2">•</span>
|
||||
<a href="#" class="underline">chess</a>
|
||||
</div>
|
||||
<!-- Content -->
|
||||
<div class="p-4">
|
||||
<h2 class="text-2xl font-extrabold leading-tight mb-3 tracking-tight">A chunky Aero/95 take on the Indie card.</h2>
|
||||
<div class="overflow-hidden rounded-[4px] bevel-inset">
|
||||
<img src="https://picsum.photos/seed/chunky/960/520" class="w-full block" alt="Chunky card image">
|
||||
</div>
|
||||
<p class="mt-3">Big borders, beveled edges, and frosted panels bring that throwback desktop feel.</p>
|
||||
<a href="#" class="mt-2 inline-block font-bold underline focus-95">READ MORE »</a>
|
||||
</div>
|
||||
</article>
|
||||
|
||||
<!-- Card 2: Strong Win95 chrome -->
|
||||
<article class="relative bevel bg-[color:var(--surface)] card-press win-enter crt-panel">
|
||||
<div class="px-3 py-2 flex items-center justify-between border-b-4 border-b-[#808080]" style="background: var(--titlebar-grad); color:#fff;">
|
||||
<span class="font-bold tracking-wide uppercase text-xs">Code</span>
|
||||
<div class="flex gap-1.5">
|
||||
<span class="w-3.5 h-3.5 bg-white"></span>
|
||||
<span class="w-3.5 h-3.5 bg-white"></span>
|
||||
<span class="w-3.5 h-3.5 bg-white"></span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="px-4 py-2 text-sm text-black border-b-4 border-b-[#808080]">
|
||||
<time datetime="2020-08-31">8.31.2020</time>
|
||||
<span class="mx-2">•</span>
|
||||
<a href="#" class="underline">resource</a>
|
||||
</div>
|
||||
<div class="p-4">
|
||||
<h2 class="text-2xl font-extrabold mb-3">Tower defense… but Flexbox.</h2>
|
||||
<div class="bevel-inset overflow-hidden">
|
||||
<img src="https://picsum.photos/seed/flex/960/520" alt="Flexbox" class="w-full block">
|
||||
</div>
|
||||
<p class="mt-3">Train display order, growth, and basis—rendered like a 1995 dialog box.</p>
|
||||
<a href="#" class="mt-2 inline-block font-bold underline focus-95">START WAVE 1 »</a>
|
||||
</div>
|
||||
</article>
|
||||
</section>
|
||||
|
||||
<!-- SECONDARY STACK -->
|
||||
<aside class="space-y-6">
|
||||
<article class="rounded-win border border-[color:var(--border)] backdrop-blur-[var(--glass)] bg-[color:var(--surface)] shadow-glass card-press win-enter crt-panel">
|
||||
<div class="px-3 py-2 border-b border-[color:var(--border)] flex items-center justify-between" style="background: var(--titlebar-grad); color: #0b1220;">
|
||||
<span class="font-bold uppercase text-xs">Resource</span>
|
||||
</div>
|
||||
<div class="px-4 py-2 text-sm border-b border-[color:var(--border)]" style="color: var(--meta);">
|
||||
<time datetime="2020-07-10">7.10.2020</time> • <a href="#" class="underline">icons</a>
|
||||
</div>
|
||||
<div class="p-4">
|
||||
<h3 class="text-xl font-extrabold mb-2">Icon pack in MS-era chrome</h3>
|
||||
<div class="bevel-inset overflow-hidden">
|
||||
<img src="https://picsum.photos/seed/icons/800/480" alt="Icons" class="w-full block">
|
||||
</div>
|
||||
<a href="#" class="mt-2 inline-block font-bold underline focus-95">WHO, WHAT, WHERE…</a>
|
||||
</div>
|
||||
</article>
|
||||
|
||||
<article class="bevel bg-[color:var(--surface)] card-press win-enter crt-panel">
|
||||
<div class="px-3 py-2 flex items-center justify-between border-b-4 border-b-[#808080]" style="background: var(--titlebar-grad); color:#fff;">
|
||||
<span class="font-bold uppercase text-xs">Audio</span>
|
||||
</div>
|
||||
<div class="px-4 py-2 text-sm text-black border-b-4 border-b-[#808080]">
|
||||
<time datetime="2020-06-14">6.14.2020</time> • <a href="#" class="underline">git</a>
|
||||
</div>
|
||||
<div class="p-4">
|
||||
<h3 class="text-xl font-extrabold mb-2">Scales around the world</h3>
|
||||
<div class="bevel-inset overflow-hidden">
|
||||
<img src="https://picsum.photos/seed/omni/800/480" alt="A Digital Harp" class="w-full block">
|
||||
</div>
|
||||
<a href="#" class="mt-2 inline-block font-bold underline focus-95">TELL ME MORE…</a>
|
||||
</div>
|
||||
</article>
|
||||
</aside>
|
||||
</div>
|
||||
|
||||
<!-- Pagination -->
|
||||
<nav class="py-8 flex justify-end">
|
||||
<a href="#" class="focus-95 px-4 py-2 font-bold border-2 rounded-win border-[color:var(--border)] bg-white/80 hover:bg-white">MORE >>></a>
|
||||
</nav>
|
||||
</main>
|
||||
|
||||
<!-- TASKBAR FOOTER -->
|
||||
<footer class="fixed bottom-0 left-0 right-0 z-50 bevel bg-[color:var(--surface)]">
|
||||
<div class="max-w-[1120px] mx-auto px-2 py-1 flex items-center justify-between">
|
||||
<div class="flex items-center gap-2">
|
||||
<button class="bevel px-3 py-1 font-bold text-sm focus-95">Start</button>
|
||||
<nav class="hidden sm:flex gap-2 text-sm">
|
||||
<a class="bevel px-2 py-1 focus-95" href="#">Posts</a>
|
||||
<a class="bevel px-2 py-1 focus-95" href="#">Tags</a>
|
||||
<a class="bevel px-2 py-1 focus-95" href="#">About</a>
|
||||
</nav>
|
||||
</div>
|
||||
<span id="clock" class="bevel px-2 py-1 font-mono text-sm">00:00</span>
|
||||
</div>
|
||||
</footer>
|
||||
|
||||
<script>
|
||||
// Theme toggles
|
||||
const html = document.documentElement;
|
||||
document.getElementById('btnAero').onclick = () => html.setAttribute('data-theme','aero');
|
||||
document.getElementById('btn95').onclick = () => html.setAttribute('data-theme','win95');
|
||||
|
||||
// CRT toggle
|
||||
document.getElementById('btnCRT').onclick = () => html.classList.toggle('crt');
|
||||
|
||||
// Clock
|
||||
const clk = () => document.getElementById('clock').textContent =
|
||||
new Date().toLocaleTimeString([], {hour:'2-digit', minute:'2-digit'});
|
||||
clk(); setInterval(clk, 1000);
|
||||
|
||||
// Konami code to flip theme
|
||||
(() => {
|
||||
const seq = ['ArrowUp','ArrowUp','ArrowDown','ArrowDown','ArrowLeft','ArrowRight','ArrowLeft','ArrowRight','b','a'];
|
||||
let i = 0;
|
||||
window.addEventListener('keydown', e => {
|
||||
i = (e.key === seq[i]) ? i+1 : 0;
|
||||
if (i === seq.length) {
|
||||
html.setAttribute('data-theme', html.getAttribute('data-theme') === 'win95' ? 'aero' : 'win95');
|
||||
i = 0;
|
||||
}
|
||||
});
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
BIN
static/United_Flyer.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 483 KiB
BIN
static/buffteks.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 178 KiB
247
tapdown/bin/Activate.ps1
Normal file
@@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||
69
tapdown/bin/activate
Normal file
@@ -0,0 +1,69 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV=/var/www/benny/tapdown
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(tapdown) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(tapdown) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
26
tapdown/bin/activate.csh
Normal file
@@ -0,0 +1,26 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV /var/www/benny/tapdown

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    set prompt = '(tapdown) '"$prompt"
    setenv VIRTUAL_ENV_PROMPT '(tapdown) '
endif

alias pydoc python -m pydoc

rehash
69
tapdown/bin/activate.fish
Normal file
@@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/); you cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV /var/www/benny/tapdown
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(tapdown) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(tapdown) '
|
||||
end
|
||||
8
tapdown/bin/flask
Executable file
@@ -0,0 +1,8 @@
#!/var/www/benny/tapdown/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8
tapdown/bin/gunicorn
Executable file
@@ -0,0 +1,8 @@
#!/var/www/benny/tapdown/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from gunicorn.app.wsgiapp import run
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(run())
8
tapdown/bin/pip
Executable file
@@ -0,0 +1,8 @@
#!/var/www/benny/tapdown/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8
tapdown/bin/pip3
Executable file
@@ -0,0 +1,8 @@
#!/var/www/benny/tapdown/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
8
tapdown/bin/pip3.11
Executable file
@@ -0,0 +1,8 @@
#!/var/www/benny/tapdown/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
1
tapdown/bin/python
Symbolic link
@@ -0,0 +1 @@
python3
1
tapdown/bin/python3
Symbolic link
@@ -0,0 +1 @@
/usr/bin/python3
1
tapdown/bin/python3.11
Symbolic link
@@ -0,0 +1 @@
python3
164
tapdown/include/site/python3.11/greenlet/greenlet.h
Normal file
@@ -0,0 +1,164 @@
|
||||
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
|
||||
|
||||
/* Greenlet object interface */
|
||||
|
||||
#ifndef Py_GREENLETOBJECT_H
|
||||
#define Py_GREENLETOBJECT_H
|
||||
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/* This is deprecated and undocumented. It does not change. */
|
||||
#define GREENLET_VERSION "1.0.0"
|
||||
|
||||
#ifndef GREENLET_MODULE
|
||||
#define implementation_ptr_t void*
|
||||
#endif
|
||||
|
||||
typedef struct _greenlet {
|
||||
PyObject_HEAD
|
||||
PyObject* weakreflist;
|
||||
PyObject* dict;
|
||||
implementation_ptr_t pimpl;
|
||||
} PyGreenlet;
|
||||
|
||||
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
|
||||
|
||||
|
||||
/* C API functions */
|
||||
|
||||
/* Total number of symbols that are exported */
|
||||
#define PyGreenlet_API_pointers 12
|
||||
|
||||
#define PyGreenlet_Type_NUM 0
|
||||
#define PyExc_GreenletError_NUM 1
|
||||
#define PyExc_GreenletExit_NUM 2
|
||||
|
||||
#define PyGreenlet_New_NUM 3
|
||||
#define PyGreenlet_GetCurrent_NUM 4
|
||||
#define PyGreenlet_Throw_NUM 5
|
||||
#define PyGreenlet_Switch_NUM 6
|
||||
#define PyGreenlet_SetParent_NUM 7
|
||||
|
||||
#define PyGreenlet_MAIN_NUM 8
|
||||
#define PyGreenlet_STARTED_NUM 9
|
||||
#define PyGreenlet_ACTIVE_NUM 10
|
||||
#define PyGreenlet_GET_PARENT_NUM 11
|
||||
|
||||
#ifndef GREENLET_MODULE
|
||||
/* This section is used by modules that uses the greenlet C API */
|
||||
static void** _PyGreenlet_API = NULL;
|
||||
|
||||
# define PyGreenlet_Type \
|
||||
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
|
||||
|
||||
# define PyExc_GreenletError \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
|
||||
|
||||
# define PyExc_GreenletExit \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_New(PyObject *args)
|
||||
*
|
||||
* greenlet.greenlet(run, parent=None)
|
||||
*/
|
||||
# define PyGreenlet_New \
|
||||
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
|
||||
_PyGreenlet_API[PyGreenlet_New_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_GetCurrent(void)
|
||||
*
|
||||
* greenlet.getcurrent()
|
||||
*/
|
||||
# define PyGreenlet_GetCurrent \
|
||||
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Throw(
|
||||
* PyGreenlet *greenlet,
|
||||
* PyObject *typ,
|
||||
* PyObject *val,
|
||||
* PyObject *tb)
|
||||
*
|
||||
* g.throw(...)
|
||||
*/
|
||||
# define PyGreenlet_Throw \
|
||||
(*(PyObject * (*)(PyGreenlet * self, \
|
||||
PyObject * typ, \
|
||||
PyObject * val, \
|
||||
PyObject * tb)) \
|
||||
_PyGreenlet_API[PyGreenlet_Throw_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
|
||||
*
|
||||
* g.switch(*args, **kwargs)
|
||||
*/
|
||||
# define PyGreenlet_Switch \
|
||||
(*(PyObject * \
|
||||
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
|
||||
_PyGreenlet_API[PyGreenlet_Switch_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
|
||||
*
|
||||
* g.parent = new_parent
|
||||
*/
|
||||
# define PyGreenlet_SetParent \
|
||||
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
|
||||
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_GetParent(PyObject* greenlet)
|
||||
*
|
||||
* return greenlet.parent;
|
||||
*
|
||||
* This could return NULL even if there is no exception active.
|
||||
* If it does not return NULL, you are responsible for decrementing the
|
||||
* reference count.
|
||||
*/
|
||||
# define PyGreenlet_GetParent \
|
||||
(*(PyGreenlet* (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
|
||||
|
||||
/*
|
||||
* deprecated, undocumented alias.
|
||||
*/
|
||||
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
|
||||
|
||||
# define PyGreenlet_MAIN \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
|
||||
|
||||
# define PyGreenlet_STARTED \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
|
||||
|
||||
# define PyGreenlet_ACTIVE \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
|
||||
|
||||
|
||||
|
||||
|
||||
/* Macro that imports greenlet and initializes C API */
|
||||
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
|
||||
keep the older definition to be sure older code that might have a copy of
|
||||
the header still works. */
|
||||
# define PyGreenlet_Import() \
|
||||
{ \
|
||||
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
|
||||
}
|
||||
|
||||
#endif /* GREENLET_MODULE */
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
#endif /* !Py_GREENLETOBJECT_H */
|
||||
@@ -0,0 +1 @@
pip
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2014 Miguel Grinberg

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,76 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Flask-SocketIO
|
||||
Version: 5.5.1
|
||||
Summary: Socket.IO integration for Flask applications
|
||||
Author-email: Miguel Grinberg <miguel.grinberg@gmail.com>
|
||||
Project-URL: Homepage, https://github.com/miguelgrinberg/flask-socketio
|
||||
Project-URL: Bug Tracker, https://github.com/miguelgrinberg/flask-socketio/issues
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Requires-Python: >=3.6
|
||||
Description-Content-Type: text/markdown
|
||||
License-File: LICENSE
|
||||
Requires-Dist: Flask>=0.9
|
||||
Requires-Dist: python-socketio>=5.12.0
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: sphinx; extra == "docs"
|
||||
|
||||
Flask-SocketIO
|
||||
==============
|
||||
|
||||
[Build status](https://github.com/miguelgrinberg/Flask-SocketIO/actions) [Code coverage](https://codecov.io/gh/miguelgrinberg/flask-socketio)
|
||||
|
||||
Socket.IO integration for Flask applications.
|
||||
|
||||
Sponsors
|
||||
--------
|
||||
|
||||
The following organizations are funding this project:
|
||||
|
||||
<br>[Socket.IO](https://socket.io) | [Add your company here!](https://github.com/sponsors/miguelgrinberg)|
|
||||
-|-
|
||||
|
||||
Many individual sponsors also support this project through small ongoing contributions. Why not [join them](https://github.com/sponsors/miguelgrinberg)?
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
You can install this package as usual with pip:
|
||||
|
||||
pip install flask-socketio
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
```py
|
||||
from flask import Flask, render_template
|
||||
from flask_socketio import SocketIO, emit
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config['SECRET_KEY'] = 'secret!'
|
||||
socketio = SocketIO(app)
|
||||
|
||||
@app.route('/')
|
||||
def index():
|
||||
return render_template('index.html')
|
||||
|
||||
@socketio.event
|
||||
def my_event(message):
|
||||
emit('my response', {'data': 'got it!'})
|
||||
|
||||
if __name__ == '__main__':
|
||||
socketio.run(app)
|
||||
```
|
||||
|
||||
Resources
|
||||
---------
|
||||
|
||||
- [Tutorial](http://blog.miguelgrinberg.com/post/easy-websockets-with-flask-and-gevent)
|
||||
- [Documentation](http://flask-socketio.readthedocs.io/en/latest/)
|
||||
- [PyPI](https://pypi.python.org/pypi/Flask-SocketIO)
|
||||
- [Change Log](https://github.com/miguelgrinberg/Flask-SocketIO/blob/main/CHANGES.md)
|
||||
- Questions? See the [questions](https://stackoverflow.com/questions/tagged/flask-socketio) others have asked on Stack Overflow, or [ask](https://stackoverflow.com/questions/ask?tags=python+flask-socketio+python-socketio) your own question.
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
Flask_SocketIO-5.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Flask_SocketIO-5.5.1.dist-info/LICENSE,sha256=aNCWbkgKjS_T1cJtACyZbvCM36KxWnfQ0LWTuavuYKQ,1082
|
||||
Flask_SocketIO-5.5.1.dist-info/METADATA,sha256=7YA8ZKizrtJiaCqqdDiTU6t1xWWdTmNw3CqBxSMcW3k,2635
|
||||
Flask_SocketIO-5.5.1.dist-info/RECORD,,
|
||||
Flask_SocketIO-5.5.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
Flask_SocketIO-5.5.1.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
|
||||
Flask_SocketIO-5.5.1.dist-info/top_level.txt,sha256=C1ugzQBJ3HHUJsWGzyt70XRVOX-y4CUAR8MWKjwJOQ8,15
|
||||
flask_socketio/__init__.py,sha256=5hN0LE0hfGMUDcX4FheZrtXERJ1IBEPagv0pgeqdtlU,54904
|
||||
flask_socketio/__pycache__/__init__.cpython-311.pyc,,
|
||||
flask_socketio/__pycache__/namespace.cpython-311.pyc,,
|
||||
flask_socketio/__pycache__/test_client.cpython-311.pyc,,
|
||||
flask_socketio/namespace.py,sha256=UkVryJvFYgnCMKWSF35GVfGdyh2cXRDyRbfmEPPchVA,2329
|
||||
flask_socketio/test_client.py,sha256=rClk02TSRqgidH8IyeohspKVKdpRx7gcZBjg1YUtZpA,11026
|
||||
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.7.0)
Root-Is-Purelib: true
Tag: py3-none-any
@@ -0,0 +1 @@
flask_socketio
222
tapdown/lib/python3.11/site-packages/_distutils_hack/__init__.py
Normal file
@@ -0,0 +1,222 @@
|
||||
# don't import any costly modules
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
is_pypy = '__pypy__' in sys.builtin_module_names
|
||||
|
||||
|
||||
def warn_distutils_present():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
if is_pypy and sys.version_info < (3, 7):
|
||||
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
||||
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
||||
return
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"Distutils was imported before Setuptools, but importing Setuptools "
|
||||
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
||||
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
||||
"using distutils directly, ensure that setuptools is installed in the "
|
||||
"traditional way (e.g. not an editable install), and/or make sure "
|
||||
"that setuptools is always imported before distutils."
|
||||
)
|
||||
|
||||
|
||||
def clear_distutils():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
import warnings
|
||||
|
||||
warnings.warn("Setuptools is replacing distutils.")
|
||||
mods = [
|
||||
name
|
||||
for name in sys.modules
|
||||
if name == "distutils" or name.startswith("distutils.")
|
||||
]
|
||||
for name in mods:
|
||||
del sys.modules[name]
|
||||
|
||||
|
||||
def enabled():
|
||||
"""
|
||||
Allow selection of distutils by environment variable.
|
||||
"""
|
||||
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
|
||||
return which == 'local'
|
||||
|
||||
|
||||
def ensure_local_distutils():
|
||||
import importlib
|
||||
|
||||
clear_distutils()
|
||||
|
||||
# With the DistutilsMetaFinder in place,
|
||||
# perform an import to cause distutils to be
|
||||
# loaded from setuptools._distutils. Ref #2906.
|
||||
with shim():
|
||||
importlib.import_module('distutils')
|
||||
|
||||
# check that submodules load as expected
|
||||
core = importlib.import_module('distutils.core')
|
||||
assert '_distutils' in core.__file__, core.__file__
|
||||
assert 'setuptools._distutils.log' not in sys.modules
|
||||
|
||||
|
||||
def do_override():
|
||||
"""
|
||||
Ensure that the local copy of distutils is preferred over stdlib.
|
||||
|
||||
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
||||
for more motivation.
|
||||
"""
|
||||
if enabled():
|
||||
warn_distutils_present()
|
||||
ensure_local_distutils()
|
||||
|
||||
|
||||
class _TrivialRe:
|
||||
def __init__(self, *patterns):
|
||||
self._patterns = patterns
|
||||
|
||||
def match(self, string):
|
||||
return all(pat in string for pat in self._patterns)
|
||||
|
||||
|
||||
class DistutilsMetaFinder:
|
||||
def find_spec(self, fullname, path, target=None):
|
||||
# optimization: only consider top level modules and those
|
||||
# found in the CPython test suite.
|
||||
if path is not None and not fullname.startswith('test.'):
|
||||
return
|
||||
|
||||
method_name = 'spec_for_{fullname}'.format(**locals())
|
||||
method = getattr(self, method_name, lambda: None)
|
||||
return method()
|
||||
|
||||
def spec_for_distutils(self):
|
||||
if self.is_cpython():
|
||||
return
|
||||
|
||||
import importlib
|
||||
import importlib.abc
|
||||
import importlib.util
|
||||
|
||||
try:
|
||||
mod = importlib.import_module('setuptools._distutils')
|
||||
except Exception:
|
||||
# There are a couple of cases where setuptools._distutils
|
||||
# may not be present:
|
||||
# - An older Setuptools without a local distutils is
|
||||
# taking precedence. Ref #2957.
|
||||
# - Path manipulation during sitecustomize removes
|
||||
# setuptools from the path but only after the hook
|
||||
# has been loaded. Ref #2980.
|
||||
# In either case, fall back to stdlib behavior.
|
||||
return
|
||||
|
||||
class DistutilsLoader(importlib.abc.Loader):
|
||||
def create_module(self, spec):
|
||||
mod.__name__ = 'distutils'
|
||||
return mod
|
||||
|
||||
def exec_module(self, module):
|
||||
pass
|
||||
|
||||
return importlib.util.spec_from_loader(
|
||||
'distutils', DistutilsLoader(), origin=mod.__file__
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def is_cpython():
|
||||
"""
|
||||
Suppress supplying distutils for CPython (build and tests).
|
||||
Ref #2965 and #3007.
|
||||
"""
|
||||
return os.path.isfile('pybuilddir.txt')
|
||||
|
||||
def spec_for_pip(self):
|
||||
"""
|
||||
Ensure stdlib distutils when running under pip.
|
||||
See pypa/pip#8761 for rationale.
|
||||
"""
|
||||
if self.pip_imported_during_build():
|
||||
return
|
||||
clear_distutils()
|
||||
self.spec_for_distutils = lambda: None
|
||||
|
||||
@classmethod
|
||||
def pip_imported_during_build(cls):
|
||||
"""
|
||||
Detect if pip is being imported in a build script. Ref #2355.
|
||||
"""
|
||||
import traceback
|
||||
|
||||
return any(
|
||||
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def frame_file_is_setup(frame):
|
||||
"""
|
||||
Return True if the indicated frame suggests a setup.py file.
|
||||
"""
|
||||
# some frames may not have __file__ (#2940)
|
||||
return frame.f_globals.get('__file__', '').endswith('setup.py')
|
||||
|
||||
def spec_for_sensitive_tests(self):
|
||||
"""
|
||||
Ensure stdlib distutils when running select tests under CPython.
|
||||
|
||||
python/cpython#91169
|
||||
"""
|
||||
clear_distutils()
|
||||
self.spec_for_distutils = lambda: None
|
||||
|
||||
sensitive_tests = (
|
||||
[
|
||||
'test.test_distutils',
|
||||
'test.test_peg_generator',
|
||||
'test.test_importlib',
|
||||
]
|
||||
if sys.version_info < (3, 10)
|
||||
else [
|
||||
'test.test_distutils',
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
for name in DistutilsMetaFinder.sensitive_tests:
|
||||
setattr(
|
||||
DistutilsMetaFinder,
|
||||
f'spec_for_{name}',
|
||||
DistutilsMetaFinder.spec_for_sensitive_tests,
|
||||
)
|
||||
|
||||
|
||||
DISTUTILS_FINDER = DistutilsMetaFinder()
|
||||
|
||||
|
||||
def add_shim():
|
||||
DISTUTILS_FINDER in sys.meta_path or insert_shim()
|
||||
|
||||
|
||||
class shim:
|
||||
def __enter__(self):
|
||||
insert_shim()
|
||||
|
||||
def __exit__(self, exc, value, tb):
|
||||
remove_shim()
|
||||
|
||||
|
||||
def insert_shim():
|
||||
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
||||
|
||||
|
||||
def remove_shim():
|
||||
try:
|
||||
sys.meta_path.remove(DISTUTILS_FINDER)
|
||||
except ValueError:
|
||||
pass
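# --- Illustrative sketch (not part of setuptools) ----------------------------
# The shim above works because Python consults sys.meta_path finders before the
# regular import machinery. A stripped-down finder that redirects a single
# module name, built only from stdlib importlib APIs, looks like this; the
# names _RedirectFinder and json_alias are invented for illustration.
import sys
import importlib
import importlib.abc
import importlib.util


class _RedirectFinder(importlib.abc.MetaPathFinder):
    def __init__(self, alias: str, target: str) -> None:
        self.alias, self.target = alias, target

    def find_spec(self, fullname, path=None, target=None):
        if fullname != self.alias:
            return None
        mod = importlib.import_module(self.target)

        class _Loader(importlib.abc.Loader):
            def create_module(self, spec):
                return mod  # reuse the already-imported target module

            def exec_module(self, module):
                pass  # nothing to execute; the target is already initialized

        return importlib.util.spec_from_loader(self.alias, _Loader(), origin=mod.__file__)


# Usage: `import json_alias` now yields the stdlib json module.
sys.meta_path.insert(0, _RedirectFinder('json_alias', 'json'))
import json_alias  # noqa: E402
assert json_alias.dumps({'ok': True}) == '{"ok": true}'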
|
||||
@@ -0,0 +1 @@
__import__('_distutils_hack').do_override()
@@ -0,0 +1 @@
pip
@@ -0,0 +1,376 @@
|
||||
Mozilla Public License Version 2.0
|
||||
==================================
|
||||
|
||||
Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
|
||||
|
||||
1. Definitions
|
||||
--------------
|
||||
|
||||
1.1. "Contributor"
|
||||
means each individual or legal entity that creates, contributes to
|
||||
the creation of, or owns Covered Software.
|
||||
|
||||
1.2. "Contributor Version"
|
||||
means the combination of the Contributions of others (if any) used
|
||||
by a Contributor and that particular Contributor's Contribution.
|
||||
|
||||
1.3. "Contribution"
|
||||
means Covered Software of a particular Contributor.
|
||||
|
||||
1.4. "Covered Software"
|
||||
means Source Code Form to which the initial Contributor has attached
|
||||
the notice in Exhibit A, the Executable Form of such Source Code
|
||||
Form, and Modifications of such Source Code Form, in each case
|
||||
including portions thereof.
|
||||
|
||||
1.5. "Incompatible With Secondary Licenses"
|
||||
means
|
||||
|
||||
(a) that the initial Contributor has attached the notice described
|
||||
in Exhibit B to the Covered Software; or
|
||||
|
||||
(b) that the Covered Software was made available under the terms of
|
||||
version 1.1 or earlier of the License, but not also under the
|
||||
terms of a Secondary License.
|
||||
|
||||
1.6. "Executable Form"
|
||||
means any form of the work other than Source Code Form.
|
||||
|
||||
1.7. "Larger Work"
|
||||
means a work that combines Covered Software with other material, in
|
||||
a separate file or files, that is not Covered Software.
|
||||
|
||||
1.8. "License"
|
||||
means this document.
|
||||
|
||||
1.9. "Licensable"
|
||||
means having the right to grant, to the maximum extent possible,
|
||||
whether at the time of the initial grant or subsequently, any and
|
||||
all of the rights conveyed by this License.
|
||||
|
||||
1.10. "Modifications"
|
||||
means any of the following:
|
||||
|
||||
(a) any file in Source Code Form that results from an addition to,
|
||||
deletion from, or modification of the contents of Covered
|
||||
Software; or
|
||||
|
||||
(b) any new file in Source Code Form that contains any Covered
|
||||
Software.
|
||||
|
||||
1.11. "Patent Claims" of a Contributor
|
||||
means any patent claim(s), including without limitation, method,
|
||||
process, and apparatus claims, in any patent Licensable by such
|
||||
Contributor that would be infringed, but for the grant of the
|
||||
License, by the making, using, selling, offering for sale, having
|
||||
made, import, or transfer of either its Contributions or its
|
||||
Contributor Version.
|
||||
|
||||
1.12. "Secondary License"
|
||||
means either the GNU General Public License, Version 2.0, the GNU
|
||||
Lesser General Public License, Version 2.1, the GNU Affero General
|
||||
Public License, Version 3.0, or any later versions of those
|
||||
licenses.
|
||||
|
||||
1.13. "Source Code Form"
|
||||
means the form of the work preferred for making modifications.
|
||||
|
||||
1.14. "You" (or "Your")
|
||||
means an individual or a legal entity exercising rights under this
|
||||
License. For legal entities, "You" includes any entity that
|
||||
controls, is controlled by, or is under common control with You. For
|
||||
purposes of this definition, "control" means (a) the power, direct
|
||||
or indirect, to cause the direction or management of such entity,
|
||||
whether by contract or otherwise, or (b) ownership of more than
|
||||
fifty percent (50%) of the outstanding shares or beneficial
|
||||
ownership of such entity.
|
||||
|
||||
2. License Grants and Conditions
|
||||
--------------------------------
|
||||
|
||||
2.1. Grants
|
||||
|
||||
Each Contributor hereby grants You a world-wide, royalty-free,
|
||||
non-exclusive license:
|
||||
|
||||
(a) under intellectual property rights (other than patent or trademark)
|
||||
Licensable by such Contributor to use, reproduce, make available,
|
||||
modify, display, perform, distribute, and otherwise exploit its
|
||||
Contributions, either on an unmodified basis, with Modifications, or
|
||||
as part of a Larger Work; and
|
||||
|
||||
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
||||
for sale, have made, import, and otherwise transfer either its
|
||||
Contributions or its Contributor Version.
|
||||
|
||||
2.2. Effective Date
|
||||
|
||||
The licenses granted in Section 2.1 with respect to any Contribution
|
||||
become effective for each Contribution on the date the Contributor first
|
||||
distributes such Contribution.
|
||||
|
||||
2.3. Limitations on Grant Scope
|
||||
|
||||
The licenses granted in this Section 2 are the only rights granted under
|
||||
this License. No additional rights or licenses will be implied from the
|
||||
distribution or licensing of Covered Software under this License.
|
||||
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
||||
Contributor:
|
||||
|
||||
(a) for any code that a Contributor has removed from Covered Software;
|
||||
or
|
||||
|
||||
(b) for infringements caused by: (i) Your and any other third party's
|
||||
modifications of Covered Software, or (ii) the combination of its
|
||||
Contributions with other software (except as part of its Contributor
|
||||
Version); or
|
||||
|
||||
(c) under Patent Claims infringed by Covered Software in the absence of
|
||||
its Contributions.
|
||||
|
||||
This License does not grant any rights in the trademarks, service marks,
|
||||
or logos of any Contributor (except as may be necessary to comply with
|
||||
the notice requirements in Section 3.4).
|
||||
|
||||
2.4. Subsequent Licenses
|
||||
|
||||
No Contributor makes additional grants as a result of Your choice to
|
||||
distribute the Covered Software under a subsequent version of this
|
||||
License (see Section 10.2) or under the terms of a Secondary License (if
|
||||
permitted under the terms of Section 3.3).
|
||||
|
||||
2.5. Representation
|
||||
|
||||
Each Contributor represents that the Contributor believes its
|
||||
Contributions are its original creation(s) or it has sufficient rights
|
||||
to grant the rights to its Contributions conveyed by this License.
|
||||
|
||||
2.6. Fair Use
|
||||
|
||||
This License is not intended to limit any rights You have under
|
||||
applicable copyright doctrines of fair use, fair dealing, or other
|
||||
equivalents.
|
||||
|
||||
2.7. Conditions
|
||||
|
||||
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
||||
in Section 2.1.
|
||||
|
||||
3. Responsibilities
|
||||
-------------------
|
||||
|
||||
3.1. Distribution of Source Form
|
||||
|
||||
All distribution of Covered Software in Source Code Form, including any
|
||||
Modifications that You create or to which You contribute, must be under
|
||||
the terms of this License. You must inform recipients that the Source
|
||||
Code Form of the Covered Software is governed by the terms of this
|
||||
License, and how they can obtain a copy of this License. You may not
|
||||
attempt to alter or restrict the recipients' rights in the Source Code
|
||||
Form.
|
||||
|
||||
3.2. Distribution of Executable Form
|
||||
|
||||
If You distribute Covered Software in Executable Form then:
|
||||
|
||||
(a) such Covered Software must also be made available in Source Code
|
||||
Form, as described in Section 3.1, and You must inform recipients of
|
||||
the Executable Form how they can obtain a copy of such Source Code
|
||||
Form by reasonable means in a timely manner, at a charge no more
|
||||
than the cost of distribution to the recipient; and
|
||||
|
||||
(b) You may distribute such Executable Form under the terms of this
|
||||
License, or sublicense it under different terms, provided that the
|
||||
license for the Executable Form does not attempt to limit or alter
|
||||
the recipients' rights in the Source Code Form under this License.
|
||||
|
||||
3.3. Distribution of a Larger Work
|
||||
|
||||
You may create and distribute a Larger Work under terms of Your choice,
|
||||
provided that You also comply with the requirements of this License for
|
||||
the Covered Software. If the Larger Work is a combination of Covered
|
||||
Software with a work governed by one or more Secondary Licenses, and the
|
||||
Covered Software is not Incompatible With Secondary Licenses, this
|
||||
License permits You to additionally distribute such Covered Software
|
||||
under the terms of such Secondary License(s), so that the recipient of
|
||||
the Larger Work may, at their option, further distribute the Covered
|
||||
Software under the terms of either this License or such Secondary
|
||||
License(s).
|
||||
|
||||
3.4. Notices
|
||||
|
||||
You may not remove or alter the substance of any license notices
|
||||
(including copyright notices, patent notices, disclaimers of warranty,
|
||||
or limitations of liability) contained within the Source Code Form of
|
||||
the Covered Software, except that You may alter any license notices to
|
||||
the extent required to remedy known factual inaccuracies.
|
||||
|
||||
3.5. Application of Additional Terms
|
||||
|
||||
You may choose to offer, and to charge a fee for, warranty, support,
|
||||
indemnity or liability obligations to one or more recipients of Covered
|
||||
Software. However, You may do so only on Your own behalf, and not on
|
||||
behalf of any Contributor. You must make it absolutely clear that any
|
||||
such warranty, support, indemnity, or liability obligation is offered by
|
||||
You alone, and You hereby agree to indemnify every Contributor for any
|
||||
liability incurred by such Contributor as a result of warranty, support,
|
||||
indemnity or liability terms You offer. You may include additional
|
||||
disclaimers of warranty and limitations of liability specific to any
|
||||
jurisdiction.
|
||||
|
||||
4. Inability to Comply Due to Statute or Regulation
|
||||
---------------------------------------------------
|
||||
|
||||
If it is impossible for You to comply with any of the terms of this
|
||||
License with respect to some or all of the Covered Software due to
|
||||
statute, judicial order, or regulation then You must: (a) comply with
|
||||
the terms of this License to the maximum extent possible; and (b)
|
||||
describe the limitations and the code they affect. Such description must
|
||||
be placed in a text file included with all distributions of the Covered
|
||||
Software under this License. Except to the extent prohibited by statute
|
||||
or regulation, such description must be sufficiently detailed for a
|
||||
recipient of ordinary skill to be able to understand it.
|
||||
|
||||
5. Termination
|
||||
--------------
|
||||
|
||||
5.1. The rights granted under this License will terminate automatically
|
||||
if You fail to comply with any of its terms. However, if You become
|
||||
compliant, then the rights granted under this License from a particular
|
||||
Contributor are reinstated (a) provisionally, unless and until such
|
||||
Contributor explicitly and finally terminates Your grants, and (b) on an
|
||||
ongoing basis, if such Contributor fails to notify You of the
|
||||
non-compliance by some reasonable means prior to 60 days after You have
|
||||
come back into compliance. Moreover, Your grants from a particular
|
||||
Contributor are reinstated on an ongoing basis if such Contributor
|
||||
notifies You of the non-compliance by some reasonable means, this is the
|
||||
first time You have received notice of non-compliance with this License
|
||||
from such Contributor, and You become compliant prior to 30 days after
|
||||
Your receipt of the notice.
|
||||
|
||||
5.2. If You initiate litigation against any entity by asserting a patent
|
||||
infringement claim (excluding declaratory judgment actions,
|
||||
counter-claims, and cross-claims) alleging that a Contributor Version
|
||||
directly or indirectly infringes any patent, then the rights granted to
|
||||
You by any and all Contributors for the Covered Software under Section
|
||||
2.1 of this License shall terminate.
|
||||
|
||||
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
||||
end user license agreements (excluding distributors and resellers) which
|
||||
have been validly granted by You or Your distributors under this License
|
||||
prior to termination shall survive termination.
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 6. Disclaimer of Warranty *
|
||||
* ------------------------- *
|
||||
* *
|
||||
* Covered Software is provided under this License on an "as is" *
|
||||
* basis, without warranty of any kind, either expressed, implied, or *
|
||||
* statutory, including, without limitation, warranties that the *
|
||||
* Covered Software is free of defects, merchantable, fit for a *
|
||||
* particular purpose or non-infringing. The entire risk as to the *
|
||||
* quality and performance of the Covered Software is with You. *
|
||||
* Should any Covered Software prove defective in any respect, You *
|
||||
* (not any Contributor) assume the cost of any necessary servicing, *
|
||||
* repair, or correction. This disclaimer of warranty constitutes an *
|
||||
* essential part of this License. No use of any Covered Software is *
|
||||
* authorized under this License except under this disclaimer. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 7. Limitation of Liability *
|
||||
* -------------------------- *
|
||||
* *
|
||||
* Under no circumstances and under no legal theory, whether tort *
|
||||
* (including negligence), contract, or otherwise, shall any *
|
||||
* Contributor, or anyone who distributes Covered Software as *
|
||||
* permitted above, be liable to You for any direct, indirect, *
|
||||
* special, incidental, or consequential damages of any character *
|
||||
* including, without limitation, damages for lost profits, loss of *
|
||||
* goodwill, work stoppage, computer failure or malfunction, or any *
|
||||
* and all other commercial damages or losses, even if such party *
|
||||
* shall have been informed of the possibility of such damages. This *
|
||||
* limitation of liability shall not apply to liability for death or *
|
||||
* personal injury resulting from such party's negligence to the *
|
||||
* extent applicable law prohibits such limitation. Some *
|
||||
* jurisdictions do not allow the exclusion or limitation of *
|
||||
* incidental or consequential damages, so this exclusion and *
|
||||
* limitation may not apply to You. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
8. Litigation
|
||||
-------------
|
||||
|
||||
Any litigation relating to this License may be brought only in the
|
||||
courts of a jurisdiction where the defendant maintains its principal
|
||||
place of business and such litigation shall be governed by laws of that
|
||||
jurisdiction, without reference to its conflict-of-law provisions.
|
||||
Nothing in this Section shall prevent a party's ability to bring
|
||||
cross-claims or counter-claims.
|
||||
|
||||
9. Miscellaneous
|
||||
----------------
|
||||
|
||||
This License represents the complete agreement concerning the subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. Any law or regulation which provides
|
||||
that the language of a contract shall be construed against the drafter
|
||||
shall not be used to construe this License against a Contributor.
|
||||
|
||||
10. Versions of the License
|
||||
---------------------------
|
||||
|
||||
10.1. New Versions
|
||||
|
||||
Mozilla Foundation is the license steward. Except as provided in Section
|
||||
10.3, no one other than the license steward has the right to modify or
|
||||
publish new versions of this License. Each version will be given a
|
||||
distinguishing version number.
|
||||
|
||||
10.2. Effect of New Versions
|
||||
|
||||
You may distribute the Covered Software under the terms of the version
|
||||
of the License under which You originally received the Covered Software,
|
||||
or under the terms of any subsequent version published by the license
|
||||
steward.
|
||||
|
||||
10.3. Modified Versions
|
||||
|
||||
If you create software not governed by this License, and you want to
|
||||
create a new license for such software, you may create and use a
|
||||
modified version of this License if you rename the license and remove
|
||||
any references to the name of the license steward (except to note that
|
||||
such modified license differs from this License).
|
||||
|
||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||
Licenses
|
||||
|
||||
If You choose to distribute Source Code Form that is Incompatible With
|
||||
Secondary Licenses under the terms of this version of the License, the
|
||||
notice described in Exhibit B of this License must be attached.
|
||||
|
||||
Exhibit A - Source Code Form License Notice
|
||||
-------------------------------------------
|
||||
|
||||
This Source Code Form is subject to the terms of the Mozilla Public
|
||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
If it is not possible or desirable to put the notice in a particular
|
||||
file, then You may include the notice in a location (such as a LICENSE
|
||||
file in a relevant directory) where a recipient would be likely to look
|
||||
for such a notice.
|
||||
|
||||
You may add additional accurate notices of copyright ownership.
|
||||
|
||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||
---------------------------------------------------------
|
||||
|
||||
This Source Code Form is "Incompatible With Secondary Licenses", as
|
||||
defined by the Mozilla Public License, v. 2.0.
|
||||
@@ -0,0 +1,260 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: bidict
|
||||
Version: 0.23.1
|
||||
Summary: The bidirectional mapping library for Python.
|
||||
Author-email: Joshua Bronson <jabronson@gmail.com>
|
||||
License: MPL 2.0
|
||||
Project-URL: Changelog, https://bidict.readthedocs.io/changelog.html
|
||||
Project-URL: Documentation, https://bidict.readthedocs.io
|
||||
Project-URL: Funding, https://bidict.readthedocs.io/#sponsoring
|
||||
Project-URL: Repository, https://github.com/jab/bidict
|
||||
Keywords: bidict,bimap,bidirectional,dict,dictionary,mapping,collections
|
||||
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Typing :: Typed
|
||||
Requires-Python: >=3.8
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
|
||||
.. role:: doc
|
||||
.. (Forward declaration for the "doc" role that Sphinx defines for interop with renderers that
|
||||
are often used to show this doc and that are unaware of Sphinx (GitHub.com, PyPI.org, etc.).
|
||||
Use :doc: rather than :ref: here for better interop as well.)
|
||||
|
||||
|
||||
bidict
|
||||
======
|
||||
|
||||
*The bidirectional mapping library for Python.*
|
||||
|
||||
|
||||
Status
|
||||
------
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/bidict.svg
|
||||
:target: https://pypi.org/project/bidict
|
||||
:alt: Latest release
|
||||
|
||||
.. image:: https://img.shields.io/readthedocs/bidict/main.svg
|
||||
:target: https://bidict.readthedocs.io/en/main/
|
||||
:alt: Documentation
|
||||
|
||||
.. image:: https://github.com/jab/bidict/actions/workflows/test.yml/badge.svg
|
||||
:target: https://github.com/jab/bidict/actions/workflows/test.yml?query=branch%3Amain
|
||||
:alt: GitHub Actions CI status
|
||||
|
||||
.. image:: https://img.shields.io/pypi/l/bidict.svg
|
||||
:target: https://raw.githubusercontent.com/jab/bidict/main/LICENSE
|
||||
:alt: License
|
||||
|
||||
.. image:: https://static.pepy.tech/badge/bidict
|
||||
:target: https://pepy.tech/project/bidict
|
||||
:alt: PyPI Downloads
|
||||
|
||||
.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4
|
||||
:target: https://github.com/sponsors/jab
|
||||
:alt: Sponsor
|
||||
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Mature: Depended on by
|
||||
Google, Venmo, CERN, Baidu, Tencent,
|
||||
and teams across the world since 2009
|
||||
|
||||
- Familiar, Pythonic APIs
|
||||
that are carefully designed for
|
||||
safety, simplicity, flexibility, and ergonomics
|
||||
|
||||
- Lightweight, with no runtime dependencies
|
||||
outside Python's standard library
|
||||
|
||||
- Implemented in
|
||||
concise, well-factored, fully type-hinted Python code
|
||||
that is optimized for running efficiently
|
||||
as well as for long-term maintenance and stability
|
||||
(as well as `joy <#learning-from-bidict>`__)
|
||||
|
||||
- Extensively `documented <https://bidict.readthedocs.io>`__
|
||||
|
||||
- 100% test coverage
|
||||
running continuously across all supported Python versions
|
||||
(including property-based tests and benchmarks)
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
``pip install bidict``
|
||||
|
||||
|
||||
Quick Start
|
||||
-----------
|
||||
|
||||
.. code:: python
|
||||
|
||||
>>> from bidict import bidict
|
||||
>>> element_by_symbol = bidict({'H': 'hydrogen'})
|
||||
>>> element_by_symbol['H']
|
||||
'hydrogen'
|
||||
>>> element_by_symbol.inverse['hydrogen']
|
||||
'H'
|
||||
|
||||
|
||||
For more usage documentation,
|
||||
head to the :doc:`intro` [#fn-intro]_
|
||||
and proceed from there.
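
A hedged aside (not from the upstream docs): the inverse view stays in sync with
every forward write, and assigning a value that is already present under another
key raises ``ValueDuplicationError`` by default.

.. code:: python

   >>> from bidict import bidict, ValueDuplicationError
   >>> element_by_symbol = bidict({'H': 'hydrogen'})
   >>> element_by_symbol['He'] = 'helium'        # forward write...
   >>> element_by_symbol.inverse['helium']       # ...is visible in the inverse
   'He'
   >>> try:
   ...     element_by_symbol['X'] = 'hydrogen'   # 'hydrogen' already maps from 'H'
   ... except ValueDuplicationError:
   ...     print('duplicate value rejected')
   duplicate value rejected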
|
||||
|
||||
|
||||
Enterprise Support
|
||||
------------------
|
||||
|
||||
Enterprise-level support for bidict can be obtained via the
|
||||
`Tidelift subscription <https://tidelift.com/subscription/pkg/pypi-bidict?utm_source=pypi-bidict&utm_medium=referral&utm_campaign=readme>`__
|
||||
or by `contacting me directly <mailto:jabronson@gmail.com>`__.
|
||||
|
||||
I have a US-based LLC set up for invoicing,
|
||||
and I have 15+ years of professional experience
|
||||
delivering software and support to companies successfully.
|
||||
|
||||
You can also sponsor my work through several platforms, including GitHub Sponsors.
|
||||
See the `Sponsoring <#sponsoring>`__ section below for details,
|
||||
including rationale and examples of companies
|
||||
supporting the open source projects they depend on.
|
||||
|
||||
|
||||
Voluntary Community Support
|
||||
---------------------------
|
||||
|
||||
Please search through already-asked questions and answers
|
||||
in `GitHub Discussions <https://github.com/jab/bidict/discussions>`__
|
||||
and the `issue tracker <https://github.com/jab/bidict/issues?q=is%3Aissue>`__
|
||||
in case your question has already been addressed.
|
||||
|
||||
Otherwise, please feel free to
|
||||
`start a new discussion <https://github.com/jab/bidict/discussions>`__
|
||||
or `create a new issue <https://github.com/jab/bidict/issues/new>`__ on GitHub
|
||||
for voluntary community support.
|
||||
|
||||
|
||||
Notice of Usage
|
||||
---------------
|
||||
|
||||
If you use bidict,
|
||||
and especially if your usage or your organization is significant in some way,
|
||||
please let me know in any of the following ways:
|
||||
|
||||
- `star bidict on GitHub <https://github.com/jab/bidict>`__
|
||||
- post in `GitHub Discussions <https://github.com/jab/bidict/discussions>`__
|
||||
- `email me <mailto:jabronson@gmail.com>`__
|
||||
|
||||
|
||||
Changelog
|
||||
---------
|
||||
|
||||
For bidict release notes, see the :doc:`changelog`. [#fn-changelog]_
|
||||
|
||||
|
||||
Release Notifications
|
||||
---------------------
|
||||
|
||||
.. duplicated in CHANGELOG.rst:
|
||||
(Would use `.. include::` but GitHub's renderer doesn't support it.)
|
||||
|
||||
Watch `bidict releases on GitHub <https://github.com/jab/bidict/releases>`__
|
||||
to be notified when new versions of bidict are published.
|
||||
Click the "Watch" dropdown, choose "Custom", and then choose "Releases".
|
||||
|
||||
|
||||
Learning from bidict
|
||||
--------------------
|
||||
|
||||
One of the best things about bidict
|
||||
is that it touches a surprising number of
|
||||
interesting Python corners,
|
||||
especially given its small size and scope.
|
||||
|
||||
Check out :doc:`learning-from-bidict` [#fn-learning]_
|
||||
if you're interested in learning more.
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
I have been bidict's sole maintainer
|
||||
and `active contributor <https://github.com/jab/bidict/graphs/contributors>`__
|
||||
since I started the project ~15 years ago.
|
||||
|
||||
Your help would be most welcome!
|
||||
See the :doc:`contributors-guide` [#fn-contributing]_
|
||||
for more information.
|
||||
|
||||
|
||||
Sponsoring
|
||||
----------
|
||||
|
||||
.. duplicated in CONTRIBUTING.rst
|
||||
(Would use `.. include::` but GitHub's renderer doesn't support it.)
|
||||
|
||||
.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4
|
||||
:target: https://github.com/sponsors/jab
|
||||
:alt: Sponsor through GitHub
|
||||
|
||||
Bidict is the product of thousands of hours of my unpaid work
|
||||
over the 15+ years that I've been the sole maintainer.
|
||||
|
||||
If bidict has helped you or your company accomplish your work,
|
||||
please sponsor my work through one of the following,
|
||||
and/or ask your company to do the same:
|
||||
|
||||
- `GitHub <https://github.com/sponsors/jab>`__
|
||||
- `PayPal <https://www.paypal.com/cgi-bin/webscr?cmd=_xclick&business=jabronson%40gmail%2ecom&lc=US&item_name=Sponsor%20bidict>`__
|
||||
- `Tidelift <https://tidelift.com>`__
|
||||
- `thanks.dev <https://thanks.dev>`__
|
||||
- `Gumroad <https://gumroad.com/l/bidict>`__
|
||||
- `a support engagement with my LLC <#enterprise-support>`__
|
||||
|
||||
If you're not sure which to use, GitHub is an easy option,
|
||||
especially if you already have a GitHub account.
|
||||
Just choose a monthly or one-time amount, and GitHub handles everything else.
|
||||
Your bidict sponsorship on GitHub will automatically go
|
||||
on the same regular bill as any other GitHub charges you pay for.
|
||||
PayPal is another easy option for one-time contributions.
|
||||
|
||||
See the following for rationale and examples of companies
|
||||
supporting the open source projects they depend on
|
||||
in this manner:
|
||||
|
||||
- `<https://engineering.atspotify.com/2022/04/announcing-the-spotify-foss-fund/>`__
|
||||
- `<https://blog.sentry.io/2021/10/21/we-just-gave-154-999-dollars-and-89-cents-to-open-source-maintainers>`__
|
||||
- `<https://engineering.indeedblog.com/blog/2019/07/foss-fund-six-months-in/>`__
|
||||
|
||||
.. - `<https://sethmlarson.dev/blog/people-in-your-software-supply-chain>`__
|
||||
.. - `<https://www.cognitect.com/blog/supporting-open-source-developers>`__
|
||||
.. - `<https://vorpus.org/blog/the-unreasonable-effectiveness-of-investment-in-open-source-infrastructure/>`__
|
||||
|
||||
|
||||
Finding Documentation
|
||||
---------------------
|
||||
|
||||
If you're viewing this on `<https://bidict.readthedocs.io>`__,
|
||||
note that multiple versions of the documentation are available,
|
||||
and you can choose a different version using the popup menu at the bottom-right.
|
||||
Please make sure you're viewing the version of the documentation
|
||||
that corresponds to the version of bidict you'd like to use.
|
||||
|
||||
If you're viewing this on GitHub, PyPI, or some other place
|
||||
that can't render and link this documentation properly
|
||||
and are seeing broken links,
|
||||
try these alternate links instead:
|
||||
|
||||
.. [#fn-intro] `<https://bidict.readthedocs.io/intro.html>`__ | `<docs/intro.rst>`__
|
||||
|
||||
.. [#fn-changelog] `<https://bidict.readthedocs.io/changelog.html>`__ | `<CHANGELOG.rst>`__
|
||||
|
||||
.. [#fn-learning] `<https://bidict.readthedocs.io/learning-from-bidict.html>`__ | `<docs/learning-from-bidict.rst>`__
|
||||
|
||||
.. [#fn-contributing] `<https://bidict.readthedocs.io/contributors-guide.html>`__ | `<CONTRIBUTING.rst>`__
|
||||
@@ -0,0 +1,31 @@
bidict-0.23.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
bidict-0.23.1.dist-info/LICENSE,sha256=8_U63OyqSNc6ZuI4-lupBstBh2eDtF0ooTRrMULuvZo,16784
bidict-0.23.1.dist-info/METADATA,sha256=2ovIRm6Df8gdwAMekGqkeBSF5TWj2mv1jpmh4W4ks7o,8704
bidict-0.23.1.dist-info/RECORD,,
bidict-0.23.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
bidict-0.23.1.dist-info/top_level.txt,sha256=WuQO02jp0ODioS7sJoaHg3JJ5_3h6Sxo9RITvNGPYmc,7
bidict/__init__.py,sha256=pL87KsrDpBsl3AG09LQk1t1TSFt0hIJVYa2POMdErN8,4398
bidict/__pycache__/__init__.cpython-311.pyc,,
bidict/__pycache__/_abc.cpython-311.pyc,,
bidict/__pycache__/_base.cpython-311.pyc,,
bidict/__pycache__/_bidict.cpython-311.pyc,,
bidict/__pycache__/_dup.cpython-311.pyc,,
bidict/__pycache__/_exc.cpython-311.pyc,,
bidict/__pycache__/_frozen.cpython-311.pyc,,
bidict/__pycache__/_iter.cpython-311.pyc,,
bidict/__pycache__/_orderedbase.cpython-311.pyc,,
bidict/__pycache__/_orderedbidict.cpython-311.pyc,,
bidict/__pycache__/_typing.cpython-311.pyc,,
bidict/__pycache__/metadata.cpython-311.pyc,,
bidict/_abc.py,sha256=SMCNdCsmqSWg0OGnMZtnnXY8edjXcyZup5tva4HBm_c,3172
bidict/_base.py,sha256=YiauA0aj52fNB6cfZ4gBt6OV-CRQoZm7WVhuw1nT-Cg,24439
bidict/_bidict.py,sha256=Sr-RoEzWOaxpnDRbDJ7ngaGRIsyGnqZgzvR-NyT4jl4,6923
bidict/_dup.py,sha256=YAn5gWA6lwMBA5A6ebVF19UTZyambGS8WxmbK4TN1Ww,2079
bidict/_exc.py,sha256=HnD_WgteI5PrXa3zBx9RUiGlgnZTO6CF4nIU9p3-njk,1066
bidict/_frozen.py,sha256=p4TaRHKeyTs0KmlpwSnZiTlN_CR4J97kAgBpNdZHQMs,1771
bidict/_iter.py,sha256=zVUx-hJ1M4YuJROoFWRjPKlcaFnyo1AAuRpOaKAFhOQ,1530
bidict/_orderedbase.py,sha256=M7v5rHa7vrym9Z3DxQBFQDxjnrr39Z8p26V0c1PggoE,8942
bidict/_orderedbidict.py,sha256=pPnmC19mIISrj8_yjnb-4r_ti1B74tD5eTd08DETNuI,7080
bidict/_typing.py,sha256=AylMZpBhEFTQegfziPSxfKkKLk7oUsH6o3awDIg2z_k,1289
bidict/metadata.py,sha256=BMIKu6fBY_OKeV_q48EpumE7MdmFw8rFcdaUz8kcIYk,573
bidict/py.typed,sha256=RJao5SVFYIp8IfbxhL_SpZkBQYe3XXzPlobSRdh4B_c,16
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -0,0 +1 @@
bidict
103
tapdown/lib/python3.11/site-packages/bidict/__init__.py
Normal file
@@ -0,0 +1,103 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# * Welcome to the bidict source code *
|
||||
# ============================================================================
|
||||
|
||||
# Reading through the code? You'll find a "Code review nav" comment like the one
|
||||
# below at the top and bottom of the key source files. Follow these cues to take
|
||||
# a path through the code that's optimized for familiarizing yourself with it.
|
||||
#
|
||||
# If you're not reading this on https://github.com/jab/bidict already, go there
|
||||
# to ensure you have the latest version of the code. While there, you can also
|
||||
# star the project, watch it for updates, fork the code, and submit an issue or
|
||||
# pull request with any proposed changes. More information can be found linked
|
||||
# from README.rst, which is also shown on https://github.com/jab/bidict.
|
||||
|
||||
# * Code review nav *
|
||||
# ============================================================================
|
||||
# Current: __init__.py Next: _abc.py →
|
||||
# ============================================================================
|
||||
|
||||
|
||||
"""The bidirectional mapping library for Python.
|
||||
|
||||
----
|
||||
|
||||
bidict by example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
>>> from bidict import bidict
|
||||
>>> element_by_symbol = bidict({'H': 'hydrogen'})
|
||||
>>> element_by_symbol['H']
|
||||
'hydrogen'
|
||||
>>> element_by_symbol.inverse['hydrogen']
|
||||
'H'
|
||||
|
||||
|
||||
Please see https://github.com/jab/bidict for the most up-to-date code and
|
||||
https://bidict.readthedocs.io for the most up-to-date documentation
|
||||
if you are reading this elsewhere.
|
||||
|
||||
----
|
||||
|
||||
.. :copyright: (c) 2009-2024 Joshua Bronson.
|
||||
.. :license: MPLv2. See LICENSE for details.
|
||||
"""
|
||||
|
||||
# Use private aliases to not re-export these publicly (for Sphinx automodule with imported-members).
|
||||
from __future__ import annotations as _annotations
|
||||
|
||||
from contextlib import suppress as _suppress
|
||||
|
||||
from ._abc import BidirectionalMapping as BidirectionalMapping
|
||||
from ._abc import MutableBidirectionalMapping as MutableBidirectionalMapping
|
||||
from ._base import BidictBase as BidictBase
|
||||
from ._base import BidictKeysView as BidictKeysView
|
||||
from ._base import GeneratedBidictInverse as GeneratedBidictInverse
|
||||
from ._bidict import MutableBidict as MutableBidict
|
||||
from ._bidict import bidict as bidict
|
||||
from ._dup import DROP_NEW as DROP_NEW
|
||||
from ._dup import DROP_OLD as DROP_OLD
|
||||
from ._dup import ON_DUP_DEFAULT as ON_DUP_DEFAULT
|
||||
from ._dup import ON_DUP_DROP_OLD as ON_DUP_DROP_OLD
|
||||
from ._dup import ON_DUP_RAISE as ON_DUP_RAISE
|
||||
from ._dup import RAISE as RAISE
|
||||
from ._dup import OnDup as OnDup
|
||||
from ._dup import OnDupAction as OnDupAction
|
||||
from ._exc import BidictException as BidictException
|
||||
from ._exc import DuplicationError as DuplicationError
|
||||
from ._exc import KeyAndValueDuplicationError as KeyAndValueDuplicationError
|
||||
from ._exc import KeyDuplicationError as KeyDuplicationError
|
||||
from ._exc import ValueDuplicationError as ValueDuplicationError
|
||||
from ._frozen import frozenbidict as frozenbidict
|
||||
from ._iter import inverted as inverted
|
||||
from ._orderedbase import OrderedBidictBase as OrderedBidictBase
|
||||
from ._orderedbidict import OrderedBidict as OrderedBidict
|
||||
from .metadata import __author__ as __author__
|
||||
from .metadata import __copyright__ as __copyright__
|
||||
from .metadata import __description__ as __description__
|
||||
from .metadata import __license__ as __license__
|
||||
from .metadata import __url__ as __url__
|
||||
from .metadata import __version__ as __version__
|
||||
|
||||
|
||||
# Set __module__ of re-exported classes to the 'bidict' top-level module, so that e.g.
|
||||
# 'bidict.bidict' shows up as 'bidict.bidict' rather than 'bidict._bidict.bidict'.
|
||||
for _obj in tuple(locals().values()): # pragma: no cover
|
||||
if not getattr(_obj, '__module__', '').startswith('bidict.'):
|
||||
continue
|
||||
with _suppress(AttributeError):
|
||||
_obj.__module__ = 'bidict'
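# Illustrative check (not part of the library) of the effect of the loop above:
# the re-exported classes report the top-level package as their module.
#
#     >>> import bidict
#     >>> bidict.bidict.__module__
#     'bidict'
#     >>> bidict.OrderedBidict.__module__
#     'bidict'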
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# ============================================================================
|
||||
# Current: __init__.py Next: _abc.py →
|
||||
# ============================================================================
|
||||
79
tapdown/lib/python3.11/site-packages/bidict/_abc.py
Normal file
@@ -0,0 +1,79 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


# * Code review nav *
# (see comments in __init__.py)
# ============================================================================
# ← Prev: __init__.py         Current: _abc.py              Next: _base.py →
# ============================================================================


"""Provide the :class:`BidirectionalMapping` abstract base class."""

from __future__ import annotations

import typing as t
from abc import abstractmethod

from ._typing import KT
from ._typing import VT


class BidirectionalMapping(t.Mapping[KT, VT]):
    """Abstract base class for bidirectional mapping types.

    Extends :class:`collections.abc.Mapping` primarily by adding the
    (abstract) :attr:`inverse` property,
    which implementers of :class:`BidirectionalMapping`
    should override to return a reference to the inverse
    :class:`BidirectionalMapping` instance.
    """

    __slots__ = ()

    @property
    @abstractmethod
    def inverse(self) -> BidirectionalMapping[VT, KT]:
        """The inverse of this bidirectional mapping instance.

        *See also* :attr:`bidict.BidictBase.inverse`, :attr:`bidict.BidictBase.inv`

        :raises NotImplementedError: Meant to be overridden in subclasses.
        """
        # The @abstractmethod decorator prevents subclasses from being instantiated unless they
        # override this method. But an overriding implementation may merely return super().inverse,
        # in which case this implementation is used. Raise NotImplementedError to indicate that
        # subclasses must actually provide their own implementation.
        raise NotImplementedError

    def __inverted__(self) -> t.Iterator[tuple[VT, KT]]:
        """Get an iterator over the items in :attr:`inverse`.

        This is functionally equivalent to iterating over the items in the
        forward mapping and inverting each one on the fly, but this provides a
        more efficient implementation: Assuming the already-inverted items
        are stored in :attr:`inverse`, just return an iterator over them directly.

        Providing this default implementation enables external functions,
        particularly :func:`~bidict.inverted`, to use this optimized
        implementation when available, instead of having to invert on the fly.

        *See also* :func:`bidict.inverted`
        """
        return iter(self.inverse.items())


class MutableBidirectionalMapping(BidirectionalMapping[KT, VT], t.MutableMapping[KT, VT]):
    """Abstract base class for mutable bidirectional mapping types."""

    __slots__ = ()


# * Code review nav *
# ============================================================================
# ← Prev: __init__.py         Current: _abc.py              Next: _base.py →
# ============================================================================
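A brief, hedged illustration (not from the upstream sources) of how the __inverted__ hook defined above is consumed by bidict.inverted():

from bidict import bidict, inverted

b = bidict({'H': 'hydrogen', 'He': 'helium'})
# bidict instances provide __inverted__, so inverted() iterates the stored
# inverse directly instead of flipping each item on the fly:
assert dict(inverted(b)) == {'hydrogen': 'H', 'helium': 'He'}
# Plain mappings lack __inverted__, so inverted() falls back to swapping items:
assert dict(inverted({'a': 1})) == {1: 'a'}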
556
tapdown/lib/python3.11/site-packages/bidict/_base.py
Normal file
@@ -0,0 +1,556 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# (see comments in __init__.py)
|
||||
# ============================================================================
|
||||
# ← Prev: _abc.py Current: _base.py Next: _frozen.py →
|
||||
# ============================================================================
|
||||
|
||||
|
||||
"""Provide :class:`BidictBase`."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
import weakref
|
||||
from itertools import starmap
|
||||
from operator import eq
|
||||
from types import MappingProxyType
|
||||
|
||||
from ._abc import BidirectionalMapping
|
||||
from ._dup import DROP_NEW
|
||||
from ._dup import DROP_OLD
|
||||
from ._dup import ON_DUP_DEFAULT
|
||||
from ._dup import RAISE
|
||||
from ._dup import OnDup
|
||||
from ._exc import DuplicationError
|
||||
from ._exc import KeyAndValueDuplicationError
|
||||
from ._exc import KeyDuplicationError
|
||||
from ._exc import ValueDuplicationError
|
||||
from ._iter import inverted
|
||||
from ._iter import iteritems
|
||||
from ._typing import KT
|
||||
from ._typing import MISSING
|
||||
from ._typing import OKT
|
||||
from ._typing import OVT
|
||||
from ._typing import VT
|
||||
from ._typing import Maplike
|
||||
from ._typing import MapOrItems
|
||||
|
||||
|
||||
OldKV = t.Tuple[OKT[KT], OVT[VT]]
|
||||
DedupResult = t.Optional[OldKV[KT, VT]]
|
||||
Unwrites = t.List[t.Tuple[t.Any, ...]]
|
||||
BT = t.TypeVar('BT', bound='BidictBase[t.Any, t.Any]')
|
||||
|
||||
|
||||
class BidictKeysView(t.KeysView[KT], t.ValuesView[KT]):
|
||||
"""Since the keys of a bidict are the values of its inverse (and vice versa),
|
||||
the :class:`~collections.abc.ValuesView` result of calling *bi.values()*
|
||||
is also a :class:`~collections.abc.KeysView` of *bi.inverse*.
|
||||
"""
|
||||
|
||||
|
||||
class BidictBase(BidirectionalMapping[KT, VT]):
|
||||
"""Base class implementing :class:`BidirectionalMapping`."""
|
||||
|
||||
#: The default :class:`~bidict.OnDup`
|
||||
#: that governs behavior when a provided item
|
||||
#: duplicates the key or value of other item(s).
|
||||
#:
|
||||
#: *See also*
|
||||
#: :ref:`basic-usage:Values Must Be Unique` (https://bidict.rtfd.io/basic-usage.html#values-must-be-unique),
|
||||
#: :doc:`extending` (https://bidict.rtfd.io/extending.html)
|
||||
on_dup = ON_DUP_DEFAULT
|
||||
|
||||
_fwdm: t.MutableMapping[KT, VT] #: the backing forward mapping (*key* → *val*)
|
||||
_invm: t.MutableMapping[VT, KT] #: the backing inverse mapping (*val* → *key*)
|
||||
|
||||
# Use Any rather than KT/VT in the following to avoid "ClassVar cannot contain type variables" errors:
|
||||
_fwdm_cls: t.ClassVar[type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing forward mapping
|
||||
_invm_cls: t.ClassVar[type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing inverse mapping
|
||||
|
||||
#: The class of the inverse bidict instance.
|
||||
_inv_cls: t.ClassVar[type[BidictBase[t.Any, t.Any]]]
|
||||
|
||||
def __init_subclass__(cls) -> None:
|
||||
super().__init_subclass__()
|
||||
cls._init_class()
|
||||
|
||||
@classmethod
|
||||
def _init_class(cls) -> None:
|
||||
cls._ensure_inv_cls()
|
||||
cls._set_reversed()
|
||||
|
||||
__reversed__: t.ClassVar[t.Any]
|
||||
|
||||
@classmethod
|
||||
def _set_reversed(cls) -> None:
|
||||
"""Set __reversed__ for subclasses that do not set it explicitly
|
||||
according to whether backing mappings are reversible.
|
||||
"""
|
||||
if cls is not BidictBase:
|
||||
resolved = cls.__reversed__
|
||||
overridden = resolved is not BidictBase.__reversed__
|
||||
if overridden: # E.g. OrderedBidictBase, OrderedBidict
|
||||
return
|
||||
backing_reversible = all(issubclass(i, t.Reversible) for i in (cls._fwdm_cls, cls._invm_cls))
|
||||
cls.__reversed__ = _fwdm_reversed if backing_reversible else None
|
||||
|
||||
@classmethod
|
||||
def _ensure_inv_cls(cls) -> None:
|
||||
"""Ensure :attr:`_inv_cls` is set, computing it dynamically if necessary.
|
||||
|
||||
All subclasses provided in :mod:`bidict` are their own inverse classes,
|
||||
i.e., their backing forward and inverse mappings are both the same type,
|
||||
but users may define subclasses where this is not the case.
|
||||
This method ensures that the inverse class is computed correctly regardless.
|
||||
|
||||
See: :ref:`extending:Dynamic Inverse Class Generation`
|
||||
(https://bidict.rtfd.io/extending.html#dynamic-inverse-class-generation)
|
||||
"""
|
||||
# This _ensure_inv_cls() method is (indirectly) corecursive with _make_inv_cls() below
|
||||
# in the case that we need to dynamically generate the inverse class:
|
||||
# 1. _ensure_inv_cls() calls cls._make_inv_cls()
|
||||
# 2. cls._make_inv_cls() calls type(..., (cls, ...), ...) to dynamically generate inv_cls
|
||||
# 3. Our __init_subclass__ hook (see above) is automatically called on inv_cls
|
||||
# 4. inv_cls.__init_subclass__() calls inv_cls._ensure_inv_cls()
|
||||
# 5. inv_cls._ensure_inv_cls() resolves to this implementation
|
||||
# (inv_cls deliberately does not override this), so we're back where we started.
|
||||
# But since the _make_inv_cls() call will have set inv_cls.__dict__._inv_cls,
|
||||
# just check if it's already set before calling _make_inv_cls() to prevent infinite recursion.
|
||||
if getattr(cls, '__dict__', {}).get('_inv_cls'): # Don't assume cls.__dict__ (e.g. mypyc native class)
|
||||
return
|
||||
cls._inv_cls = cls._make_inv_cls()
|
||||
|
||||
@classmethod
|
||||
def _make_inv_cls(cls: type[BT]) -> type[BT]:
|
||||
diff = cls._inv_cls_dict_diff()
|
||||
cls_is_own_inv = all(getattr(cls, k, MISSING) == v for (k, v) in diff.items())
|
||||
if cls_is_own_inv:
|
||||
return cls
|
||||
# Suppress auto-calculation of _inv_cls's _inv_cls since we know it already.
|
||||
# Works with the guard in BidictBase._ensure_inv_cls() to prevent infinite recursion.
|
||||
diff['_inv_cls'] = cls
|
||||
inv_cls = type(f'{cls.__name__}Inv', (cls, GeneratedBidictInverse), diff)
|
||||
inv_cls.__module__ = cls.__module__
|
||||
return t.cast(t.Type[BT], inv_cls)
|
||||
|
||||
@classmethod
|
||||
def _inv_cls_dict_diff(cls) -> dict[str, t.Any]:
|
||||
return {
|
||||
'_fwdm_cls': cls._invm_cls,
|
||||
'_invm_cls': cls._fwdm_cls,
|
||||
}
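# Hedged sketch (not part of the upstream source) of the dynamic inverse-class
# generation implemented above, using an arbitrary pair of backing mappings:
#
#     >>> from collections import OrderedDict
#     >>> from bidict import bidict
#     >>> class AsymmetricBidict(bidict):
#     ...     _fwdm_cls = dict
#     ...     _invm_cls = OrderedDict
#     ...
#     >>> AsymmetricBidict._inv_cls.__name__   # generated by _make_inv_cls()
#     'AsymmetricBidictInv'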
|
||||
|
||||
def __init__(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
|
||||
"""Make a new bidirectional mapping.
|
||||
The signature behaves like that of :class:`dict`.
|
||||
Items passed via positional arg are processed first,
|
||||
followed by any items passed via keyword argument.
|
||||
Any duplication encountered along the way
|
||||
is handled as per :attr:`on_dup`.
|
||||
"""
|
||||
self._fwdm = self._fwdm_cls()
|
||||
self._invm = self._invm_cls()
|
||||
self._update(arg, kw, rollback=False)
|
||||
|
||||
# If Python ever adds support for higher-kinded types, `inverse` could use them, e.g.
|
||||
# def inverse(self: BT[KT, VT]) -> BT[VT, KT]:
|
||||
# Ref: https://github.com/python/typing/issues/548#issuecomment-621571821
|
||||
@property
|
||||
def inverse(self) -> BidictBase[VT, KT]:
|
||||
"""The inverse of this bidirectional mapping instance."""
|
||||
# When `bi.inverse` is called for the first time, this method
|
||||
# computes the inverse instance, stores it for subsequent use, and then
|
||||
# returns it. It also stores a reference on `bi.inverse` back to `bi`,
|
||||
# but uses a weakref to avoid creating a reference cycle. Strong references
|
||||
# to inverse instances are stored in ._inv, and weak references are stored
|
||||
# in ._invweak.
|
||||
|
||||
# First check if a strong reference is already stored.
|
||||
inv: BidictBase[VT, KT] | None = getattr(self, '_inv', None)
|
||||
if inv is not None:
|
||||
return inv
|
||||
# Next check if a weak reference is already stored.
|
||||
invweak = getattr(self, '_invweak', None)
|
||||
if invweak is not None:
|
||||
inv = invweak() # Try to resolve a strong reference and return it.
|
||||
if inv is not None:
|
||||
return inv
|
||||
# No luck. Compute the inverse reference and store it for subsequent use.
|
||||
inv = self._make_inverse()
|
||||
self._inv: BidictBase[VT, KT] | None = inv
|
||||
self._invweak: weakref.ReferenceType[BidictBase[VT, KT]] | None = None
|
||||
# Also store a weak reference back to `instance` on its inverse instance, so that
|
||||
# the second `.inverse` access in `bi.inverse.inverse` hits the cached weakref.
|
||||
inv._inv = None
|
||||
inv._invweak = weakref.ref(self)
|
||||
# In e.g. `bidict().inverse.inverse`, this design ensures that a strong reference
|
||||
# back to the original instance is retained before its refcount drops to zero,
|
||||
# avoiding an unintended potential deallocation.
|
||||
return inv
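# Hedged usage sketch (illustrative only, not part of the upstream source):
#
#     >>> from bidict import bidict
#     >>> b = bidict(one=1)
#     >>> b.inverse[1]
#     'one'
#     >>> b.inverse.inverse is b   # the weak back-reference resolves to b
#     True
#
# Because the inverse holds only a weak reference back, dropping all external
# references to b still allows both instances to be garbage-collected.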
|
||||
|
||||
def _make_inverse(self) -> BidictBase[VT, KT]:
|
||||
inv: BidictBase[VT, KT] = self._inv_cls()
|
||||
inv._fwdm = self._invm
|
||||
inv._invm = self._fwdm
|
||||
return inv
|
||||
|
||||
@property
|
||||
def inv(self) -> BidictBase[VT, KT]:
|
||||
"""Alias for :attr:`inverse`."""
|
||||
return self.inverse
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""See :func:`repr`."""
|
||||
clsname = self.__class__.__name__
|
||||
items = dict(self.items()) if self else ''
|
||||
return f'{clsname}({items})'
|
||||
|
||||
def values(self) -> BidictKeysView[VT]:
|
||||
"""A set-like object providing a view on the contained values.
|
||||
|
||||
Since the values of a bidict are equivalent to the keys of its inverse,
|
||||
this method returns a set-like object for this bidict's values
|
||||
rather than just a collections.abc.ValuesView.
|
||||
This object supports set operations like union and difference,
|
||||
and constant- rather than linear-time containment checks,
|
||||
and is no more expensive to provide than the less capable
|
||||
collections.abc.ValuesView would be.
|
||||
|
||||
See :meth:`keys` for more information.
|
||||
"""
|
||||
return t.cast(BidictKeysView[VT], self.inverse.keys())
|
||||
|
||||
def keys(self) -> t.KeysView[KT]:
|
||||
"""A set-like object providing a view on the contained keys.
|
||||
|
||||
When *b._fwdm* is a :class:`dict`, *b.keys()* returns a
|
||||
*dict_keys* object that behaves exactly the same as
|
||||
*collections.abc.KeysView(b)*, except for
|
||||
|
||||
- offering better performance
|
||||
|
||||
- being reversible on Python 3.8+
|
||||
|
||||
- having a .mapping attribute in Python 3.10+
|
||||
that exposes a mappingproxy to *b._fwdm*.
|
||||
"""
|
||||
fwdm, fwdm_cls = self._fwdm, self._fwdm_cls
|
||||
return fwdm.keys() if fwdm_cls is dict else BidictKeysView(self)
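# Hedged illustration (not part of the upstream source) of the set-like views
# described above:
#
#     >>> from bidict import bidict
#     >>> b = bidict({'H': 'hydrogen', 'He': 'helium'})
#     >>> 'helium' in b.values()             # constant-time containment
#     True
#     >>> b.values() & {'hydrogen', 'neon'}  # set operations work too
#     {'hydrogen'}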
|
||||
|
||||
def items(self) -> t.ItemsView[KT, VT]:
|
||||
"""A set-like object providing a view on the contained items.
|
||||
|
||||
When *b._fwdm* is a :class:`dict`, *b.items()* returns a
|
||||
*dict_items* object that behaves exactly the same as
|
||||
*collections.abc.ItemsView(b)*, except for:
|
||||
|
||||
- offering better performance
|
||||
|
||||
- being reversible on Python 3.8+
|
||||
|
||||
- having a .mapping attribute in Python 3.10+
|
||||
that exposes a mappingproxy to *b._fwdm*.
|
||||
"""
|
||||
return self._fwdm.items() if self._fwdm_cls is dict else super().items()
|
||||
|
||||
# The inherited collections.abc.Mapping.__contains__() method is implemented by doing a `try`
|
||||
# `except KeyError` around `self[key]`. The following implementation is much faster,
|
||||
# especially in the missing case.
|
||||
def __contains__(self, key: t.Any) -> bool:
|
||||
"""True if the mapping contains the specified key, else False."""
|
||||
return key in self._fwdm
|
||||
|
||||
# The inherited collections.abc.Mapping.__eq__() method is implemented in terms of an inefficient
|
||||
# `dict(self.items()) == dict(other.items())` comparison, so override it with a
|
||||
# more efficient implementation.
|
||||
def __eq__(self, other: object) -> bool:
|
||||
"""*x.__eq__(other) ⟺ x == other*
|
||||
|
||||
Equivalent to *dict(x.items()) == dict(other.items())*
|
||||
but more efficient.
|
||||
|
||||
Note that :meth:`bidict's __eq__() <bidict.BidictBase.__eq__>` implementation
|
||||
is inherited by subclasses,
|
||||
in particular by the ordered bidict subclasses,
|
||||
so even with ordered bidicts,
|
||||
:ref:`== comparison is order-insensitive <eq-order-insensitive>`
|
||||
(https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive).
|
||||
|
||||
*See also* :meth:`equals_order_sensitive`
|
||||
"""
|
||||
if isinstance(other, t.Mapping):
|
||||
return self._fwdm.items() == other.items()
|
||||
# Ref: https://docs.python.org/3/library/constants.html#NotImplemented
|
||||
return NotImplemented
|
||||
|
||||
def equals_order_sensitive(self, other: object) -> bool:
|
||||
"""Order-sensitive equality check.
|
||||
|
||||
*See also* :ref:`eq-order-insensitive`
|
||||
(https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive)
|
||||
"""
|
||||
if not isinstance(other, t.Mapping) or len(self) != len(other):
|
||||
return False
|
||||
return all(starmap(eq, zip(self.items(), other.items())))
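# Hedged illustration (not part of the upstream source) of the two equality
# notions above:
#
#     >>> from bidict import OrderedBidict
#     >>> a = OrderedBidict({'H': 1, 'He': 2})
#     >>> b = OrderedBidict({'He': 2, 'H': 1})
#     >>> a == b                        # __eq__ is order-insensitive
#     True
#     >>> a.equals_order_sensitive(b)   # same items, different order
#     False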
|
||||
|
||||
def _dedup(self, key: KT, val: VT, on_dup: OnDup) -> DedupResult[KT, VT]:
|
||||
"""Check *key* and *val* for any duplication in self.
|
||||
|
||||
Handle any duplication as per the passed in *on_dup*.
|
||||
|
||||
If (key, val) is already present, return None
|
||||
since writing (key, val) would be a no-op.
|
||||
|
||||
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
|
||||
:attr:`~bidict.DROP_NEW`, return None.
|
||||
|
||||
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
|
||||
:attr:`~bidict.RAISE`, raise the appropriate exception.
|
||||
|
||||
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
|
||||
:attr:`~bidict.DROP_OLD`, or if no duplication is found,
|
||||
return *(oldkey, oldval)*.
|
||||
"""
|
||||
fwdm, invm = self._fwdm, self._invm
|
||||
oldval: OVT[VT] = fwdm.get(key, MISSING)
|
||||
oldkey: OKT[KT] = invm.get(val, MISSING)
|
||||
isdupkey, isdupval = oldval is not MISSING, oldkey is not MISSING
|
||||
if isdupkey and isdupval:
|
||||
if key == oldkey:
|
||||
assert val == oldval
|
||||
# (key, val) duplicates an existing item -> no-op.
|
||||
return None
|
||||
# key and val each duplicate a different existing item.
|
||||
if on_dup.val is RAISE:
|
||||
raise KeyAndValueDuplicationError(key, val)
|
||||
if on_dup.val is DROP_NEW:
|
||||
return None
|
||||
assert on_dup.val is DROP_OLD
|
||||
# Fall through to the return statement on the last line.
|
||||
elif isdupkey:
|
||||
if on_dup.key is RAISE:
|
||||
raise KeyDuplicationError(key)
|
||||
if on_dup.key is DROP_NEW:
|
||||
return None
|
||||
assert on_dup.key is DROP_OLD
|
||||
# Fall through to the return statement on the last line.
|
||||
elif isdupval:
|
||||
if on_dup.val is RAISE:
|
||||
raise ValueDuplicationError(val)
|
||||
if on_dup.val is DROP_NEW:
|
||||
return None
|
||||
assert on_dup.val is DROP_OLD
|
||||
# Fall through to the return statement on the last line.
|
||||
# else neither isdupkey nor isdupval.
|
||||
return oldkey, oldval
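# Illustrative sketch (not part of the upstream source): with the default on_dup,
# an item whose key and value duplicate two *different* existing items raises.
# >>> from bidict import bidict, KeyAndValueDuplicationError
# >>> b = bidict({0: 1, 2: 3})
# >>> try:
# ...     b[0] = 3
# ... except KeyAndValueDuplicationError as exc:
# ...     exc.args
# (0, 3)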
|
||||
|
||||
def _write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], unwrites: Unwrites | None) -> None:
|
||||
"""Insert (newkey, newval), extending *unwrites* with associated inverse operations if provided.
|
||||
|
||||
*oldkey* and *oldval* are as returned by :meth:`_dedup`.
|
||||
|
||||
If *unwrites* is not None, it is extended with the inverse operations necessary to undo the write.
|
||||
This design allows :meth:`_update` to roll back a partially applied update that fails part-way through
|
||||
when necessary.
|
||||
|
||||
This design also allows subclasses that require additional operations to easily extend this implementation.
|
||||
For example, :class:`bidict.OrderedBidictBase` calls this inherited implementation, and then extends *unwrites*
|
||||
with additional operations needed to keep its internal linked list nodes consistent with its items' order
|
||||
as changes are made.
|
||||
"""
|
||||
fwdm, invm = self._fwdm, self._invm
|
||||
fwdm_set, invm_set = fwdm.__setitem__, invm.__setitem__
|
||||
fwdm_del, invm_del = fwdm.__delitem__, invm.__delitem__
|
||||
# Always perform the following writes regardless of duplication.
|
||||
fwdm_set(newkey, newval)
|
||||
invm_set(newval, newkey)
|
||||
if oldval is MISSING and oldkey is MISSING: # no key or value duplication
|
||||
# {0: 1, 2: 3} | {4: 5} => {0: 1, 2: 3, 4: 5}
|
||||
if unwrites is not None:
|
||||
unwrites.extend((
|
||||
(fwdm_del, newkey),
|
||||
(invm_del, newval),
|
||||
))
|
||||
elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items
|
||||
# {0: 1, 2: 3} | {0: 3} => {0: 3}
|
||||
fwdm_del(oldkey)
|
||||
invm_del(oldval)
|
||||
if unwrites is not None:
|
||||
unwrites.extend((
|
||||
(fwdm_set, newkey, oldval),
|
||||
(invm_set, oldval, newkey),
|
||||
(fwdm_set, oldkey, newval),
|
||||
(invm_set, newval, oldkey),
|
||||
))
|
||||
elif oldval is not MISSING: # just key duplication
|
||||
# {0: 1, 2: 3} | {2: 4} => {0: 1, 2: 4}
|
||||
invm_del(oldval)
|
||||
if unwrites is not None:
|
||||
unwrites.extend((
|
||||
(fwdm_set, newkey, oldval),
|
||||
(invm_set, oldval, newkey),
|
||||
(invm_del, newval),
|
||||
))
|
||||
else:
|
||||
assert oldkey is not MISSING # just value duplication
|
||||
# {0: 1, 2: 3} | {4: 3} => {0: 1, 4: 3}
|
||||
fwdm_del(oldkey)
|
||||
if unwrites is not None:
|
||||
unwrites.extend((
|
||||
(fwdm_set, oldkey, newval),
|
||||
(invm_set, newval, oldkey),
|
||||
(fwdm_del, newkey),
|
||||
))
|
||||
|
||||
def _update(
|
||||
self,
|
||||
arg: MapOrItems[KT, VT],
|
||||
kw: t.Mapping[str, VT] = MappingProxyType({}),
|
||||
*,
|
||||
rollback: bool | None = None,
|
||||
on_dup: OnDup | None = None,
|
||||
) -> None:
|
||||
"""Update with the items from *arg* and *kw*, maybe failing and rolling back as per *on_dup* and *rollback*."""
|
||||
# Note: We must process input in a single pass, since arg may be a generator.
|
||||
if not isinstance(arg, (t.Iterable, Maplike)):
|
||||
raise TypeError(f"'{arg.__class__.__name__}' object is not iterable")
|
||||
if not arg and not kw:
|
||||
return
|
||||
if on_dup is None:
|
||||
on_dup = self.on_dup
|
||||
if rollback is None:
|
||||
rollback = RAISE in on_dup
|
||||
|
||||
# Fast path when we're empty and updating only from another bidict (i.e. no dup vals in new items).
|
||||
if not self and not kw and isinstance(arg, BidictBase):
|
||||
self._init_from(arg)
|
||||
return
|
||||
|
||||
# Fast path when we're adding more items than we contain already and rollback is enabled:
|
||||
# Update a copy of self with rollback disabled. Fail if that fails, otherwise become the copy.
|
||||
if rollback and isinstance(arg, t.Sized) and len(arg) + len(kw) > len(self):
|
||||
tmp = self.copy()
|
||||
tmp._update(arg, kw, rollback=False, on_dup=on_dup)
|
||||
self._init_from(tmp)
|
||||
return
|
||||
|
||||
# In all other cases, benchmarking has indicated that the update is best implemented as follows:
|
||||
# For each new item, perform a dup check (raising if necessary), and apply the associated writes we need to
|
||||
# perform on our backing _fwdm and _invm mappings. If rollback is enabled, also compute the associated unwrites
|
||||
# as we go. If the update results in a DuplicationError and rollback is enabled, apply the accumulated unwrites
|
||||
# before raising, to ensure that we fail clean.
|
||||
write = self._write
|
||||
unwrites: Unwrites | None = [] if rollback else None
|
||||
for key, val in iteritems(arg, **kw):
|
||||
try:
|
||||
dedup_result = self._dedup(key, val, on_dup)
|
||||
except DuplicationError:
|
||||
if unwrites is not None:
|
||||
for fn, *args in reversed(unwrites):
|
||||
fn(*args)
|
||||
raise
|
||||
if dedup_result is not None:
|
||||
write(key, val, *dedup_result, unwrites=unwrites)
|
||||
|
||||
def __copy__(self: BT) -> BT:
|
||||
"""Used for the copy protocol. See the :mod:`copy` module."""
|
||||
return self.copy()
|
||||
|
||||
def copy(self: BT) -> BT:
|
||||
"""Make a (shallow) copy of this bidict."""
|
||||
# Could just `return self.__class__(self)` here, but the below is faster. The former
|
||||
# would copy this bidict's items into a new instance one at a time (checking for duplication
|
||||
# for each item), whereas the below copies from the backing mappings all at once, and foregoes
|
||||
# item-by-item duplication checking since the backing mappings have been checked already.
|
||||
return self._from_other(self.__class__, self)
|
||||
|
||||
@staticmethod
|
||||
def _from_other(bt: type[BT], other: MapOrItems[KT, VT], inv: bool = False) -> BT:
|
||||
"""Fast, private constructor based on :meth:`_init_from`.
|
||||
|
||||
If *inv* is true, return the inverse of the instance instead of the instance itself.
|
||||
(Useful for pickling with dynamically-generated inverse classes -- see :meth:`__reduce__`.)
|
||||
"""
|
||||
inst = bt()
|
||||
inst._init_from(other)
|
||||
return t.cast(BT, inst.inverse) if inv else inst
|
||||
|
||||
def _init_from(self, other: MapOrItems[KT, VT]) -> None:
|
||||
"""Fast init from *other*, bypassing item-by-item duplication checking."""
|
||||
self._fwdm.clear()
|
||||
self._invm.clear()
|
||||
self._fwdm.update(other)
|
||||
# If other is a bidict, use its existing backing inverse mapping, otherwise
|
||||
# other could be a generator that's now exhausted, so invert self._fwdm on the fly.
|
||||
inv = other.inverse if isinstance(other, BidictBase) else inverted(self._fwdm)
|
||||
self._invm.update(inv)
|
||||
|
||||
# other's type is Mapping rather than Maplike since bidict() | SupportsKeysAndGetItem({})
|
||||
# raises a TypeError, just like dict() | SupportsKeysAndGetItem({}) does.
|
||||
def __or__(self: BT, other: t.Mapping[KT, VT]) -> BT:
|
||||
"""Return self|other."""
|
||||
if not isinstance(other, t.Mapping):
|
||||
return NotImplemented
|
||||
new = self.copy()
|
||||
new._update(other, rollback=False)
|
||||
return new
|
||||
|
||||
def __ror__(self: BT, other: t.Mapping[KT, VT]) -> BT:
|
||||
"""Return other|self."""
|
||||
if not isinstance(other, t.Mapping):
|
||||
return NotImplemented
|
||||
new = self.__class__(other)
|
||||
new._update(self, rollback=False)
|
||||
return new
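# Illustrative sketch (not part of the upstream source): the | operator (and its
# reflected form) behaves like dict's merge operator, with bidict's duplication
# checking applied to the result.
# >>> from bidict import bidict
# >>> bidict({1: 'one'}) | {2: 'two'}
# bidict({1: 'one', 2: 'two'})
# >>> {2: 'two'} | bidict({1: 'one'})
# bidict({2: 'two', 1: 'one'})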
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""The number of contained items."""
|
||||
return len(self._fwdm)
|
||||
|
||||
def __iter__(self) -> t.Iterator[KT]:
|
||||
"""Iterator over the contained keys."""
|
||||
return iter(self._fwdm)
|
||||
|
||||
def __getitem__(self, key: KT) -> VT:
|
||||
"""*x.__getitem__(key) ⟺ x[key]*"""
|
||||
return self._fwdm[key]
|
||||
|
||||
def __reduce__(self) -> tuple[t.Any, ...]:
|
||||
"""Return state information for pickling."""
|
||||
cls = self.__class__
|
||||
inst: t.Mapping[t.Any, t.Any] = self
|
||||
# If this bidict's class is dynamically generated, pickle the inverse instead, whose (presumably not
|
||||
# dynamically generated) class the caller is more likely to have a reference to somewhere in sys.modules
|
||||
# that pickle can discover.
|
||||
if should_invert := isinstance(self, GeneratedBidictInverse):
|
||||
cls = self._inv_cls
|
||||
inst = self.inverse
|
||||
return self._from_other, (cls, dict(inst), should_invert)
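# Illustrative sketch (not part of the upstream source): __reduce__ lets instances
# round-trip through pickle.
# >>> import pickle
# >>> from bidict import bidict
# >>> b = bidict(one=1)
# >>> pickle.loads(pickle.dumps(b)) == b
# True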
|
||||
|
||||
|
||||
# See BidictBase._set_reversed() above.
|
||||
def _fwdm_reversed(self: BidictBase[KT, t.Any]) -> t.Iterator[KT]:
|
||||
"""Iterator over the contained keys in reverse order."""
|
||||
assert isinstance(self._fwdm, t.Reversible)
|
||||
return reversed(self._fwdm)
|
||||
|
||||
|
||||
BidictBase._init_class()
|
||||
|
||||
|
||||
class GeneratedBidictInverse:
|
||||
"""Base class for dynamically-generated inverse bidict classes."""
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# ============================================================================
|
||||
# ← Prev: _abc.py Current: _base.py Next: _frozen.py →
|
||||
# ============================================================================
|
||||
194
tapdown/lib/python3.11/site-packages/bidict/_bidict.py
Normal file
@@ -0,0 +1,194 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# (see comments in __init__.py)
|
||||
# ============================================================================
|
||||
# ← Prev: _frozen.py Current: _bidict.py Next: _orderedbase.py →
|
||||
# ============================================================================
|
||||
|
||||
|
||||
"""Provide :class:`MutableBidict` and :class:`bidict`."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from ._abc import MutableBidirectionalMapping
|
||||
from ._base import BidictBase
|
||||
from ._dup import ON_DUP_DROP_OLD
|
||||
from ._dup import ON_DUP_RAISE
|
||||
from ._dup import OnDup
|
||||
from ._typing import DT
|
||||
from ._typing import KT
|
||||
from ._typing import MISSING
|
||||
from ._typing import ODT
|
||||
from ._typing import VT
|
||||
from ._typing import MapOrItems
|
||||
|
||||
|
||||
class MutableBidict(BidictBase[KT, VT], MutableBidirectionalMapping[KT, VT]):
|
||||
"""Base class for mutable bidirectional mappings."""
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
|
||||
@property
|
||||
def inverse(self) -> MutableBidict[VT, KT]: ...
|
||||
|
||||
@property
|
||||
def inv(self) -> MutableBidict[VT, KT]: ...
|
||||
|
||||
def _pop(self, key: KT) -> VT:
|
||||
val = self._fwdm.pop(key)
|
||||
del self._invm[val]
|
||||
return val
|
||||
|
||||
def __delitem__(self, key: KT) -> None:
|
||||
"""*x.__delitem__(y) ⟺ del x[y]*"""
|
||||
self._pop(key)
|
||||
|
||||
def __setitem__(self, key: KT, val: VT) -> None:
|
||||
"""Set the value for *key* to *val*.
|
||||
|
||||
If *key* is already associated with *val*, this is a no-op.
|
||||
|
||||
If *key* is already associated with a different value,
|
||||
the old value will be replaced with *val*,
|
||||
as with dict's :meth:`__setitem__`.
|
||||
|
||||
If *val* is already associated with a different key,
|
||||
an exception is raised
|
||||
to protect against accidental removal of the key
|
||||
that's currently associated with *val*.
|
||||
|
||||
Use :meth:`put` instead if you want to specify different behavior in
|
||||
the case that the provided key or value duplicates an existing one.
|
||||
Or use :meth:`forceput` to unconditionally associate *key* with *val*,
|
||||
replacing any existing items as necessary to preserve uniqueness.
|
||||
|
||||
:raises bidict.ValueDuplicationError: if *val* duplicates that of an
|
||||
existing item.
|
||||
|
||||
:raises bidict.KeyAndValueDuplicationError: if *key* duplicates the key of an
|
||||
existing item and *val* duplicates the value of a different
|
||||
existing item.
|
||||
"""
|
||||
self.put(key, val, on_dup=self.on_dup)
|
||||
|
||||
def put(self, key: KT, val: VT, on_dup: OnDup = ON_DUP_RAISE) -> None:
|
||||
"""Associate *key* with *val*, honoring the :class:`OnDup` given in *on_dup*.
|
||||
|
||||
For example, if *on_dup* is :attr:`~bidict.ON_DUP_RAISE`,
|
||||
then *key* will be associated with *val* if and only if
|
||||
*key* is not already associated with an existing value and
|
||||
*val* is not already associated with an existing key,
|
||||
otherwise an exception will be raised.
|
||||
|
||||
If *key* is already associated with *val*, this is a no-op.
|
||||
|
||||
:raises bidict.KeyDuplicationError: if attempting to insert an item
|
||||
whose key only duplicates an existing item's, and *on_dup.key* is
|
||||
:attr:`~bidict.RAISE`.
|
||||
|
||||
:raises bidict.ValueDuplicationError: if attempting to insert an item
|
||||
whose value only duplicates an existing item's, and *on_dup.val* is
|
||||
:attr:`~bidict.RAISE`.
|
||||
|
||||
:raises bidict.KeyAndValueDuplicationError: if attempting to insert an
|
||||
item whose key duplicates one existing item's, and whose value
|
||||
duplicates another existing item's, and *on_dup.val* is
|
||||
:attr:`~bidict.RAISE`.
|
||||
"""
|
||||
self._update(((key, val),), on_dup=on_dup)
|
||||
|
||||
def forceput(self, key: KT, val: VT) -> None:
|
||||
"""Associate *key* with *val* unconditionally.
|
||||
|
||||
Replace any existing mappings containing key *key* or value *val*
|
||||
as necessary to preserve uniqueness.
|
||||
"""
|
||||
self.put(key, val, on_dup=ON_DUP_DROP_OLD)
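# Illustrative sketch (not part of the upstream source): __setitem__ vs. forceput
# under the default duplication policy.
# >>> from bidict import bidict, ValueDuplicationError
# >>> b = bidict({'H': 'hydrogen'})
# >>> b['H'] = 'hydrogen gas'            # overwriting an existing key's value is fine
# >>> try:
# ...     b['He'] = 'hydrogen gas'       # but a duplicate value raises by default
# ... except ValueDuplicationError:
# ...     pass
# >>> b.forceput('He', 'hydrogen gas')   # forceput drops the conflicting old item
# >>> b
# bidict({'He': 'hydrogen gas'})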
|
||||
|
||||
def clear(self) -> None:
|
||||
"""Remove all items."""
|
||||
self._fwdm.clear()
|
||||
self._invm.clear()
|
||||
|
||||
@t.overload
|
||||
def pop(self, key: KT, /) -> VT: ...
|
||||
@t.overload
|
||||
def pop(self, key: KT, default: DT = ..., /) -> VT | DT: ...
|
||||
|
||||
def pop(self, key: KT, default: ODT[DT] = MISSING, /) -> VT | DT:
|
||||
"""*x.pop(k[, d]) → v*
|
||||
|
||||
Remove specified key and return the corresponding value.
|
||||
|
||||
:raises KeyError: if *key* is not found and no *default* is provided.
|
||||
"""
|
||||
try:
|
||||
return self._pop(key)
|
||||
except KeyError:
|
||||
if default is MISSING:
|
||||
raise
|
||||
return default
|
||||
|
||||
def popitem(self) -> tuple[KT, VT]:
|
||||
"""*x.popitem() → (k, v)*
|
||||
|
||||
Remove and return some item as a (key, value) pair.
|
||||
|
||||
:raises KeyError: if *x* is empty.
|
||||
"""
|
||||
key, val = self._fwdm.popitem()
|
||||
del self._invm[val]
|
||||
return key, val
|
||||
|
||||
def update(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
|
||||
"""Like calling :meth:`putall` with *self.on_dup* passed for *on_dup*."""
|
||||
self._update(arg, kw=kw)
|
||||
|
||||
def forceupdate(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
|
||||
"""Like a bulk :meth:`forceput`."""
|
||||
self._update(arg, kw=kw, on_dup=ON_DUP_DROP_OLD)
|
||||
|
||||
def putall(self, items: MapOrItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None:
|
||||
"""Like a bulk :meth:`put`.
|
||||
|
||||
If one of the given items causes an exception to be raised,
|
||||
none of the items is inserted.
|
||||
"""
|
||||
self._update(items, on_dup=on_dup)
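# Illustrative sketch (not part of the upstream source): putall is all-or-nothing,
# rolling back any partial writes if a later item fails the duplication check.
# >>> from bidict import bidict, ValueDuplicationError
# >>> b = bidict({1: 'one'})
# >>> try:
# ...     b.putall([(2, 'two'), (3, 'one')])   # the second item's value is a duplicate
# ... except ValueDuplicationError:
# ...     pass
# >>> b                                        # (2, 'two') was rolled back
# bidict({1: 'one'})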
|
||||
|
||||
# other's type is Mapping rather than Maplike since bidict() |= SupportsKeysAndGetItem({})
|
||||
# raises a TypeError, just like dict() |= SupportsKeysAndGetItem({}) does.
|
||||
def __ior__(self, other: t.Mapping[KT, VT]) -> MutableBidict[KT, VT]:
|
||||
"""Return self|=other."""
|
||||
self.update(other)
|
||||
return self
|
||||
|
||||
|
||||
class bidict(MutableBidict[KT, VT]):
|
||||
"""The main bidirectional mapping type.
|
||||
|
||||
See :ref:`intro:Introduction` and :ref:`basic-usage:Basic Usage`
|
||||
to get started (also available at https://bidict.rtfd.io).
|
||||
"""
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
|
||||
@property
|
||||
def inverse(self) -> bidict[VT, KT]: ...
|
||||
|
||||
@property
|
||||
def inv(self) -> bidict[VT, KT]: ...
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# ============================================================================
|
||||
# ← Prev: _frozen.py Current: _bidict.py Next: _orderedbase.py →
|
||||
# ============================================================================
|
||||
61
tapdown/lib/python3.11/site-packages/bidict/_dup.py
Normal file
@@ -0,0 +1,61 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
"""Provide :class:`OnDup` and related functionality."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class OnDupAction(Enum):
|
||||
"""An action to take to prevent duplication from occurring."""
|
||||
|
||||
#: Raise a :class:`~bidict.DuplicationError`.
|
||||
RAISE = 'RAISE'
|
||||
#: Overwrite existing items with new items.
|
||||
DROP_OLD = 'DROP_OLD'
|
||||
#: Keep existing items and drop new items.
|
||||
DROP_NEW = 'DROP_NEW'
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'{self.__class__.__name__}.{self.name}'
|
||||
|
||||
|
||||
RAISE: t.Final[OnDupAction] = OnDupAction.RAISE
|
||||
DROP_OLD: t.Final[OnDupAction] = OnDupAction.DROP_OLD
|
||||
DROP_NEW: t.Final[OnDupAction] = OnDupAction.DROP_NEW
|
||||
|
||||
|
||||
class OnDup(t.NamedTuple):
|
||||
r"""A combination of :class:`~bidict.OnDupAction`\s specifying how to handle various types of duplication.
|
||||
|
||||
The :attr:`~OnDup.key` field specifies what action to take when a duplicate key is encountered.
|
||||
|
||||
The :attr:`~OnDup.val` field specifies what action to take when a duplicate value is encountered.
|
||||
|
||||
In the case of both key and value duplication across two different items,
|
||||
only :attr:`~OnDup.val` is used.
|
||||
|
||||
*See also* :ref:`basic-usage:Values Must Be Unique`
|
||||
(https://bidict.rtfd.io/basic-usage.html#values-must-be-unique)
|
||||
"""
|
||||
|
||||
key: OnDupAction = DROP_OLD
|
||||
val: OnDupAction = RAISE
|
||||
|
||||
|
||||
#: Default :class:`OnDup` used for the
|
||||
#: :meth:`~bidict.bidict.__init__`,
|
||||
#: :meth:`~bidict.bidict.__setitem__`, and
|
||||
#: :meth:`~bidict.bidict.update` methods.
|
||||
ON_DUP_DEFAULT: t.Final[OnDup] = OnDup(key=DROP_OLD, val=RAISE)
|
||||
#: An :class:`OnDup` whose members are all :obj:`RAISE`.
|
||||
ON_DUP_RAISE: t.Final[OnDup] = OnDup(key=RAISE, val=RAISE)
|
||||
#: An :class:`OnDup` whose members are all :obj:`DROP_OLD`.
|
||||
ON_DUP_DROP_OLD: t.Final[OnDup] = OnDup(key=DROP_OLD, val=DROP_OLD)
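# Illustrative sketch (not part of the upstream source): a custom OnDup can keep the
# existing item and silently drop the new one.
# >>> from bidict import bidict, OnDup, DROP_NEW, RAISE
# >>> b = bidict({1: 'one'})
# >>> b.put(1, 'uno', OnDup(key=DROP_NEW, val=RAISE))
# >>> b
# bidict({1: 'one'})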
|
||||
36
tapdown/lib/python3.11/site-packages/bidict/_exc.py
Normal file
@@ -0,0 +1,36 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
"""Provide all bidict exceptions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
class BidictException(Exception):
|
||||
"""Base class for bidict exceptions."""
|
||||
|
||||
|
||||
class DuplicationError(BidictException):
|
||||
"""Base class for exceptions raised when uniqueness is violated
|
||||
as per the :attr:`~bidict.RAISE` :class:`~bidict.OnDupAction`.
|
||||
"""
|
||||
|
||||
|
||||
class KeyDuplicationError(DuplicationError):
|
||||
"""Raised when a given key is not unique."""
|
||||
|
||||
|
||||
class ValueDuplicationError(DuplicationError):
|
||||
"""Raised when a given value is not unique."""
|
||||
|
||||
|
||||
class KeyAndValueDuplicationError(KeyDuplicationError, ValueDuplicationError):
|
||||
"""Raised when a given item's key and value are not unique.
|
||||
|
||||
That is, its key duplicates that of another item,
|
||||
and its value duplicates that of a different other item.
|
||||
"""
|
||||
50
tapdown/lib/python3.11/site-packages/bidict/_frozen.py
Normal file
@@ -0,0 +1,50 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# (see comments in __init__.py)
|
||||
# ============================================================================
|
||||
# ← Prev: _base.py Current: _frozen.py Next: _bidict.py →
|
||||
# ============================================================================
|
||||
|
||||
"""Provide :class:`frozenbidict`, an immutable, hashable bidirectional mapping type."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from ._base import BidictBase
|
||||
from ._typing import KT
|
||||
from ._typing import VT
|
||||
|
||||
|
||||
class frozenbidict(BidictBase[KT, VT]):
|
||||
"""Immutable, hashable bidict type."""
|
||||
|
||||
_hash: int
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
|
||||
@property
|
||||
def inverse(self) -> frozenbidict[VT, KT]: ...
|
||||
|
||||
@property
|
||||
def inv(self) -> frozenbidict[VT, KT]: ...
|
||||
|
||||
def __hash__(self) -> int:
|
||||
"""The hash of this bidict as determined by its items."""
|
||||
if getattr(self, '_hash', None) is None:
|
||||
# The following is like hash(frozenset(self.items()))
|
||||
# but more memory efficient. See also: https://bugs.python.org/issue46684
|
||||
self._hash = t.ItemsView(self)._hash()
|
||||
return self._hash
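# Illustrative sketch (not part of the upstream source): frozenbidict is hashable,
# so it can serve as a dict key or set member.
# >>> from bidict import frozenbidict
# >>> d = {frozenbidict({1: 'one'}): 'value'}
# >>> d[frozenbidict({1: 'one'})]
# 'value'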
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# ============================================================================
|
||||
# ← Prev: _base.py Current: _frozen.py Next: _bidict.py →
|
||||
# ============================================================================
|
||||
51
tapdown/lib/python3.11/site-packages/bidict/_iter.py
Normal file
@@ -0,0 +1,51 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
"""Functions for iterating over items in a mapping."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from operator import itemgetter
|
||||
|
||||
from ._typing import KT
|
||||
from ._typing import VT
|
||||
from ._typing import ItemsIter
|
||||
from ._typing import Maplike
|
||||
from ._typing import MapOrItems
|
||||
|
||||
|
||||
def iteritems(arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> ItemsIter[KT, VT]:
|
||||
"""Yield the items from *arg* and *kw* in the order given."""
|
||||
if isinstance(arg, t.Mapping):
|
||||
yield from arg.items()
|
||||
elif isinstance(arg, Maplike):
|
||||
yield from ((k, arg[k]) for k in arg.keys())
|
||||
else:
|
||||
yield from arg
|
||||
yield from t.cast(ItemsIter[KT, VT], kw.items())
|
||||
|
||||
|
||||
swap: t.Final = itemgetter(1, 0)
|
||||
|
||||
|
||||
def inverted(arg: MapOrItems[KT, VT]) -> ItemsIter[VT, KT]:
|
||||
"""Yield the inverse items of the provided object.
|
||||
|
||||
If *arg* has a :func:`callable` ``__inverted__`` attribute,
|
||||
return the result of calling it.
|
||||
|
||||
Otherwise, return an iterator over the items in `arg`,
|
||||
inverting each item on the fly.
|
||||
|
||||
*See also* :attr:`bidict.BidirectionalMapping.__inverted__`
|
||||
"""
|
||||
invattr = getattr(arg, '__inverted__', None)
|
||||
if callable(invattr):
|
||||
inv: ItemsIter[VT, KT] = invattr()
|
||||
return inv
|
||||
return map(swap, iteritems(arg))
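# Illustrative sketch (not part of the upstream source): inverted() accepts mappings
# or iterables of pairs and yields the swapped items.
# >>> from bidict import inverted
# >>> dict(inverted({1: 'one', 2: 'two'}))
# {'one': 1, 'two': 2}
# >>> list(inverted([(1, 'one'), (2, 'two')]))
# [('one', 1), ('two', 2)]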
|
||||
238
tapdown/lib/python3.11/site-packages/bidict/_orderedbase.py
Normal file
@@ -0,0 +1,238 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# (see comments in __init__.py)
|
||||
# ============================================================================
|
||||
# ← Prev: _bidict.py Current: _orderedbase.py Next: _orderedbidict.py →
|
||||
# ============================================================================
|
||||
|
||||
|
||||
"""Provide :class:`OrderedBidictBase`."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from weakref import ref as weakref
|
||||
|
||||
from ._base import BidictBase
|
||||
from ._base import Unwrites
|
||||
from ._bidict import bidict
|
||||
from ._iter import iteritems
|
||||
from ._typing import KT
|
||||
from ._typing import MISSING
|
||||
from ._typing import OKT
|
||||
from ._typing import OVT
|
||||
from ._typing import VT
|
||||
from ._typing import MapOrItems
|
||||
|
||||
|
||||
AT = t.TypeVar('AT') # attr type
|
||||
|
||||
|
||||
class WeakAttr(t.Generic[AT]):
|
||||
"""Descriptor to automatically manage (de)referencing the given slot as a weakref.
|
||||
|
||||
See https://docs.python.org/3/howto/descriptor.html#managed-attributes
|
||||
for an intro to using descriptors like this for managed attributes.
|
||||
"""
|
||||
|
||||
def __init__(self, *, slot: str) -> None:
|
||||
self.slot = slot
|
||||
|
||||
def __set__(self, instance: t.Any, value: AT) -> None:
|
||||
setattr(instance, self.slot, weakref(value))
|
||||
|
||||
def __get__(self, instance: t.Any, __owner: t.Any = None) -> AT:
|
||||
return t.cast(AT, getattr(instance, self.slot)())
|
||||
|
||||
|
||||
class Node:
|
||||
"""A node in a circular doubly-linked list
|
||||
used to encode the order of items in an ordered bidict.
|
||||
|
||||
A weak reference to the previous node is stored
|
||||
to avoid creating strong reference cycles.
|
||||
Referencing/dereferencing the weakref is handled automatically by :class:`WeakAttr`.
|
||||
"""
|
||||
|
||||
prv: WeakAttr[Node] = WeakAttr(slot='_prv_weak')
|
||||
__slots__ = ('__weakref__', '_prv_weak', 'nxt')
|
||||
|
||||
nxt: Node | WeakAttr[Node] # Allow subclasses to use a WeakAttr for nxt too (see SentinelNode)
|
||||
|
||||
def __init__(self, prv: Node, nxt: Node) -> None:
|
||||
self.prv = prv
|
||||
self.nxt = nxt
|
||||
|
||||
def unlink(self) -> None:
|
||||
"""Remove self from in between prv and nxt.
|
||||
Self's references to prv and nxt are retained so it can be relinked (see below).
|
||||
"""
|
||||
self.prv.nxt = self.nxt
|
||||
self.nxt.prv = self.prv
|
||||
|
||||
def relink(self) -> None:
|
||||
"""Restore self between prv and nxt after unlinking (see above)."""
|
||||
self.prv.nxt = self.nxt.prv = self
|
||||
|
||||
|
||||
class SentinelNode(Node):
|
||||
"""Special node in a circular doubly-linked list
|
||||
that links the first node with the last node.
|
||||
When its next and previous references point back to itself
|
||||
it represents an empty list.
|
||||
"""
|
||||
|
||||
nxt: WeakAttr[Node] = WeakAttr(slot='_nxt_weak')
|
||||
__slots__ = ('_nxt_weak',)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(self, self)
|
||||
|
||||
def iternodes(self, *, reverse: bool = False) -> t.Iterator[Node]:
|
||||
"""Iterator yielding nodes in the requested order."""
|
||||
attr = 'prv' if reverse else 'nxt'
|
||||
node = getattr(self, attr)
|
||||
while node is not self:
|
||||
yield node
|
||||
node = getattr(node, attr)
|
||||
|
||||
def new_last_node(self) -> Node:
|
||||
"""Create and return a new terminal node."""
|
||||
old_last = self.prv
|
||||
new_last = Node(old_last, self)
|
||||
old_last.nxt = self.prv = new_last
|
||||
return new_last
|
||||
|
||||
|
||||
class OrderedBidictBase(BidictBase[KT, VT]):
|
||||
"""Base class implementing an ordered :class:`BidirectionalMapping`."""
|
||||
|
||||
_node_by_korv: bidict[t.Any, Node]
|
||||
_bykey: bool
|
||||
|
||||
def __init__(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
|
||||
"""Make a new ordered bidirectional mapping.
|
||||
The signature behaves like that of :class:`dict`.
|
||||
Items passed in are added in the order they are passed,
|
||||
respecting the :attr:`~bidict.BidictBase.on_dup`
|
||||
class attribute in the process.
|
||||
|
||||
The order in which items are inserted is remembered,
|
||||
similar to :class:`collections.OrderedDict`.
|
||||
"""
|
||||
self._sntl = SentinelNode()
|
||||
self._node_by_korv = bidict()
|
||||
self._bykey = True
|
||||
super().__init__(arg, **kw)
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
|
||||
@property
|
||||
def inverse(self) -> OrderedBidictBase[VT, KT]: ...
|
||||
|
||||
@property
|
||||
def inv(self) -> OrderedBidictBase[VT, KT]: ...
|
||||
|
||||
def _make_inverse(self) -> OrderedBidictBase[VT, KT]:
|
||||
inv = t.cast(OrderedBidictBase[VT, KT], super()._make_inverse())
|
||||
inv._sntl = self._sntl
|
||||
inv._node_by_korv = self._node_by_korv
|
||||
inv._bykey = not self._bykey
|
||||
return inv
|
||||
|
||||
def _assoc_node(self, node: Node, key: KT, val: VT) -> None:
|
||||
korv = key if self._bykey else val
|
||||
self._node_by_korv.forceput(korv, node)
|
||||
|
||||
def _dissoc_node(self, node: Node) -> None:
|
||||
del self._node_by_korv.inverse[node]
|
||||
node.unlink()
|
||||
|
||||
def _init_from(self, other: MapOrItems[KT, VT]) -> None:
|
||||
"""See :meth:`BidictBase._init_from`."""
|
||||
super()._init_from(other)
|
||||
bykey = self._bykey
|
||||
korv_by_node = self._node_by_korv.inverse
|
||||
korv_by_node.clear()
|
||||
korv_by_node_set = korv_by_node.__setitem__
|
||||
self._sntl.nxt = self._sntl.prv = self._sntl
|
||||
new_node = self._sntl.new_last_node
|
||||
for k, v in iteritems(other):
|
||||
korv_by_node_set(new_node(), k if bykey else v)
|
||||
|
||||
def _write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], unwrites: Unwrites | None) -> None:
|
||||
"""See :meth:`bidict.BidictBase._spec_write`."""
|
||||
super()._write(newkey, newval, oldkey, oldval, unwrites)
|
||||
assoc, dissoc = self._assoc_node, self._dissoc_node
|
||||
node_by_korv, bykey = self._node_by_korv, self._bykey
|
||||
if oldval is MISSING and oldkey is MISSING: # no key or value duplication
|
||||
# {0: 1, 2: 3} | {4: 5} => {0: 1, 2: 3, 4: 5}
|
||||
newnode = self._sntl.new_last_node()
|
||||
assoc(newnode, newkey, newval)
|
||||
if unwrites is not None:
|
||||
unwrites.append((dissoc, newnode))
|
||||
elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items
|
||||
# {0: 1, 2: 3} | {0: 3} => {0: 3}
|
||||
# n1, n2 => n1 (collapse n1 and n2 into n1)
|
||||
# oldkey: 2, oldval: 1, oldnode: n2, newkey: 0, newval: 3, newnode: n1
|
||||
if bykey:
|
||||
oldnode = node_by_korv[oldkey]
|
||||
newnode = node_by_korv[newkey]
|
||||
else:
|
||||
oldnode = node_by_korv[newval]
|
||||
newnode = node_by_korv[oldval]
|
||||
dissoc(oldnode)
|
||||
assoc(newnode, newkey, newval)
|
||||
if unwrites is not None:
|
||||
unwrites.extend((
|
||||
(assoc, newnode, newkey, oldval),
|
||||
(assoc, oldnode, oldkey, newval),
|
||||
(oldnode.relink,),
|
||||
))
|
||||
elif oldval is not MISSING: # just key duplication
|
||||
# {0: 1, 2: 3} | {2: 4} => {0: 1, 2: 4}
|
||||
# oldkey: MISSING, oldval: 3, newkey: 2, newval: 4
|
||||
node = node_by_korv[newkey if bykey else oldval]
|
||||
assoc(node, newkey, newval)
|
||||
if unwrites is not None:
|
||||
unwrites.append((assoc, node, newkey, oldval))
|
||||
else:
|
||||
assert oldkey is not MISSING # just value duplication
|
||||
# {0: 1, 2: 3} | {4: 3} => {0: 1, 4: 3}
|
||||
# oldkey: 2, oldval: MISSING, newkey: 4, newval: 3
|
||||
node = node_by_korv[oldkey if bykey else newval]
|
||||
assoc(node, newkey, newval)
|
||||
if unwrites is not None:
|
||||
unwrites.append((assoc, node, oldkey, newval))
|
||||
|
||||
def __iter__(self) -> t.Iterator[KT]:
|
||||
"""Iterator over the contained keys in insertion order."""
|
||||
return self._iter(reverse=False)
|
||||
|
||||
def __reversed__(self) -> t.Iterator[KT]:
|
||||
"""Iterator over the contained keys in reverse insertion order."""
|
||||
return self._iter(reverse=True)
|
||||
|
||||
def _iter(self, *, reverse: bool = False) -> t.Iterator[KT]:
|
||||
nodes = self._sntl.iternodes(reverse=reverse)
|
||||
korv_by_node = self._node_by_korv.inverse
|
||||
if self._bykey:
|
||||
for node in nodes:
|
||||
yield korv_by_node[node]
|
||||
else:
|
||||
key_by_val = self._invm
|
||||
for node in nodes:
|
||||
val = korv_by_node[node]
|
||||
yield key_by_val[val]
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# ============================================================================
|
||||
# ← Prev: _bidict.py Current: _orderedbase.py Next: _orderedbidict.py →
|
||||
# ============================================================================
|
||||
172
tapdown/lib/python3.11/site-packages/bidict/_orderedbidict.py
Normal file
@@ -0,0 +1,172 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# (see comments in __init__.py)
|
||||
# ============================================================================
|
||||
# ← Prev: _orderedbase.py Current: _orderedbidict.py <FIN>
|
||||
# ============================================================================
|
||||
|
||||
|
||||
"""Provide :class:`OrderedBidict`."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from collections.abc import Set
|
||||
|
||||
from ._base import BidictKeysView
|
||||
from ._bidict import MutableBidict
|
||||
from ._orderedbase import OrderedBidictBase
|
||||
from ._typing import KT
|
||||
from ._typing import VT
|
||||
|
||||
|
||||
class OrderedBidict(OrderedBidictBase[KT, VT], MutableBidict[KT, VT]):
|
||||
"""Mutable bidict type that maintains items in insertion order."""
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
|
||||
@property
|
||||
def inverse(self) -> OrderedBidict[VT, KT]: ...
|
||||
|
||||
@property
|
||||
def inv(self) -> OrderedBidict[VT, KT]: ...
|
||||
|
||||
def clear(self) -> None:
|
||||
"""Remove all items."""
|
||||
super().clear()
|
||||
self._node_by_korv.clear()
|
||||
self._sntl.nxt = self._sntl.prv = self._sntl
|
||||
|
||||
def _pop(self, key: KT) -> VT:
|
||||
val = super()._pop(key)
|
||||
node = self._node_by_korv[key if self._bykey else val]
|
||||
self._dissoc_node(node)
|
||||
return val
|
||||
|
||||
def popitem(self, last: bool = True) -> tuple[KT, VT]:
|
||||
"""*b.popitem() → (k, v)*
|
||||
|
||||
If *last* is true,
|
||||
remove and return the most recently added item as a (key, value) pair.
|
||||
Otherwise, remove and return the least recently added item.
|
||||
|
||||
:raises KeyError: if *b* is empty.
|
||||
"""
|
||||
if not self:
|
||||
raise KeyError('OrderedBidict is empty')
|
||||
node = getattr(self._sntl, 'prv' if last else 'nxt')
|
||||
korv = self._node_by_korv.inverse[node]
|
||||
if self._bykey:
|
||||
return korv, self._pop(korv)
|
||||
return self.inverse._pop(korv), korv
|
||||
|
||||
def move_to_end(self, key: KT, last: bool = True) -> None:
|
||||
"""Move the item with the given key to the end if *last* is true, else to the beginning.
|
||||
|
||||
:raises KeyError: if *key* is missing
|
||||
"""
|
||||
korv = key if self._bykey else self._fwdm[key]
|
||||
node = self._node_by_korv[korv]
|
||||
node.prv.nxt = node.nxt
|
||||
node.nxt.prv = node.prv
|
||||
sntl = self._sntl
|
||||
if last:
|
||||
lastnode = sntl.prv
|
||||
node.prv = lastnode
|
||||
node.nxt = sntl
|
||||
sntl.prv = lastnode.nxt = node
|
||||
else:
|
||||
firstnode = sntl.nxt
|
||||
node.prv = sntl
|
||||
node.nxt = firstnode
|
||||
sntl.nxt = firstnode.prv = node
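# Illustrative sketch (not part of the upstream source): insertion order is preserved
# and can be manipulated with move_to_end() and popitem().
# >>> from bidict import OrderedBidict
# >>> ob = OrderedBidict([(1, 'one'), (2, 'two'), (3, 'three')])
# >>> ob.move_to_end(1)
# >>> list(ob)
# [2, 3, 1]
# >>> ob.popitem(last=False)
# (2, 'two')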
|
||||
|
||||
# Override the keys() and items() implementations inherited from BidictBase,
|
||||
# which may delegate to the backing _fwdm dict, since this is a mutable ordered bidict,
|
||||
# and therefore the ordering of items can get out of sync with the backing mappings
|
||||
# after mutation. (Need not override values() because it delegates to .inverse.keys().)
|
||||
def keys(self) -> t.KeysView[KT]:
|
||||
"""A set-like object providing a view on the contained keys."""
|
||||
return _OrderedBidictKeysView(self)
|
||||
|
||||
def items(self) -> t.ItemsView[KT, VT]:
|
||||
"""A set-like object providing a view on the contained items."""
|
||||
return _OrderedBidictItemsView(self)
|
||||
|
||||
|
||||
# The following MappingView implementations use the __iter__ implementations
|
||||
# inherited from their superclass counterparts in collections.abc, so they
|
||||
# continue to yield items in the correct order even after an OrderedBidict
|
||||
# is mutated. They also provide a __reversed__ implementation, which is not
|
||||
# provided by the collections.abc superclasses.
|
||||
class _OrderedBidictKeysView(BidictKeysView[KT]):
|
||||
_mapping: OrderedBidict[KT, t.Any]
|
||||
|
||||
def __reversed__(self) -> t.Iterator[KT]:
|
||||
return reversed(self._mapping)
|
||||
|
||||
|
||||
class _OrderedBidictItemsView(t.ItemsView[KT, VT]):
|
||||
_mapping: OrderedBidict[KT, VT]
|
||||
|
||||
def __reversed__(self) -> t.Iterator[tuple[KT, VT]]:
|
||||
ob = self._mapping
|
||||
for key in reversed(ob):
|
||||
yield key, ob[key]
|
||||
|
||||
|
||||
# For better performance, make _OrderedBidictKeysView and _OrderedBidictItemsView delegate
|
||||
# to backing dicts for the methods they inherit from collections.abc.Set. (Cannot delegate
|
||||
# for __iter__ and __reversed__ since they are order-sensitive.) See also: https://bugs.python.org/issue46713
|
||||
_OView = t.Union[t.Type[_OrderedBidictKeysView[KT]], t.Type[_OrderedBidictItemsView[KT, t.Any]]]
|
||||
_setmethodnames: t.Iterable[str] = (
|
||||
'__lt__ __le__ __gt__ __ge__ __eq__ __ne__ __sub__ __rsub__ '
|
||||
'__or__ __ror__ __xor__ __rxor__ __and__ __rand__ isdisjoint'
|
||||
).split()
|
||||
|
||||
|
||||
def _override_set_methods_to_use_backing_dict(cls: _OView[KT], viewname: str) -> None:
|
||||
def make_proxy_method(methodname: str) -> t.Any:
|
||||
def method(self: _OrderedBidictKeysView[KT] | _OrderedBidictItemsView[KT, t.Any], *args: t.Any) -> t.Any:
|
||||
fwdm = self._mapping._fwdm
|
||||
if not isinstance(fwdm, dict): # dict view speedup not available, fall back to Set's implementation.
|
||||
return getattr(Set, methodname)(self, *args)
|
||||
fwdm_dict_view = getattr(fwdm, viewname)()
|
||||
fwdm_dict_view_method = getattr(fwdm_dict_view, methodname)
|
||||
if (
|
||||
len(args) != 1
|
||||
or not isinstance((arg := args[0]), self.__class__)
|
||||
or not isinstance(arg._mapping._fwdm, dict)
|
||||
):
|
||||
return fwdm_dict_view_method(*args)
|
||||
# self and arg are both _OrderedBidictKeysViews or _OrderedBidictItemsViews whose bidicts are backed by
|
||||
# a dict. Use arg's backing dict's corresponding view instead of arg. Otherwise, e.g. `ob1.keys()
|
||||
# < ob2.keys()` would give "TypeError: '<' not supported between instances of '_OrderedBidictKeysView' and
|
||||
# '_OrderedBidictKeysView'", because both `dict_keys(ob1).__lt__(ob2.keys()) is NotImplemented` and
|
||||
# `dict_keys(ob2).__gt__(ob1.keys()) is NotImplemented`.
|
||||
arg_dict = arg._mapping._fwdm
|
||||
arg_dict_view = getattr(arg_dict, viewname)()
|
||||
return fwdm_dict_view_method(arg_dict_view)
|
||||
|
||||
method.__name__ = methodname
|
||||
method.__qualname__ = f'{cls.__qualname__}.{methodname}'
|
||||
return method
|
||||
|
||||
for name in _setmethodnames:
|
||||
setattr(cls, name, make_proxy_method(name))
|
||||
|
||||
|
||||
_override_set_methods_to_use_backing_dict(_OrderedBidictKeysView, 'keys')
|
||||
_override_set_methods_to_use_backing_dict(_OrderedBidictItemsView, 'items')
|
||||
|
||||
|
||||
# * Code review nav *
|
||||
# ============================================================================
|
||||
# ← Prev: _orderedbase.py Current: _orderedbidict.py <FIN>
|
||||
# ============================================================================
|
||||
49
tapdown/lib/python3.11/site-packages/bidict/_typing.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
|
||||
#
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
"""Provide typing-related objects."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from enum import Enum
|
||||
|
||||
|
||||
KT = t.TypeVar('KT')
|
||||
VT = t.TypeVar('VT')
|
||||
VT_co = t.TypeVar('VT_co', covariant=True)
|
||||
|
||||
|
||||
Items = t.Iterable[t.Tuple[KT, VT]]
|
||||
|
||||
|
||||
@t.runtime_checkable
|
||||
class Maplike(t.Protocol[KT, VT_co]):
|
||||
"""Like typeshed's SupportsKeysAndGetItem, but usable at runtime."""
|
||||
|
||||
def keys(self) -> t.Iterable[KT]: ...
|
||||
|
||||
def __getitem__(self, __key: KT) -> VT_co: ...
|
||||
|
||||
|
||||
MapOrItems = t.Union[Maplike[KT, VT], Items[KT, VT]]
|
||||
MappOrItems = t.Union[t.Mapping[KT, VT], Items[KT, VT]]
|
||||
ItemsIter = t.Iterator[t.Tuple[KT, VT]]
|
||||
|
||||
|
||||
class MissingT(Enum):
|
||||
"""Sentinel used to represent none/missing when None itself can't be used."""
|
||||
|
||||
MISSING = 'MISSING'
|
||||
|
||||
|
||||
MISSING: t.Final[t.Literal[MissingT.MISSING]] = MissingT.MISSING
|
||||
OKT = t.Union[KT, MissingT] #: optional key type
|
||||
OVT = t.Union[VT, MissingT] #: optional value type
|
||||
|
||||
DT = t.TypeVar('DT') #: for default arguments
|
||||
ODT = t.Union[DT, MissingT] #: optional default arg type
|
||||
14
tapdown/lib/python3.11/site-packages/bidict/metadata.py
Normal file
@@ -0,0 +1,14 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""Define bidict package metadata."""

__version__ = '0.23.1'
__author__ = {'name': 'Joshua Bronson', 'email': 'jabronson@gmail.com'}
__copyright__ = '© 2009-2024 Joshua Bronson'
__description__ = 'The bidirectional mapping library for Python.'
__license__ = 'MPL 2.0'
__url__ = 'https://bidict.readthedocs.io'
1
tapdown/lib/python3.11/site-packages/bidict/py.typed
Normal file
@@ -0,0 +1 @@
PEP-561 marker.
@@ -0,0 +1 @@
pip
@@ -0,0 +1,20 @@
|
||||
Copyright 2010 Jason Kirtland
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
@@ -0,0 +1,60 @@
|
||||
Metadata-Version: 2.3
|
||||
Name: blinker
|
||||
Version: 1.9.0
|
||||
Summary: Fast, simple object-to-object and broadcast signaling
|
||||
Author: Jason Kirtland
|
||||
Maintainer-email: Pallets Ecosystem <contact@palletsprojects.com>
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/markdown
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Typing :: Typed
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Project-URL: Documentation, https://blinker.readthedocs.io
|
||||
Project-URL: Source, https://github.com/pallets-eco/blinker/
|
||||
|
||||
# Blinker
|
||||
|
||||
Blinker provides a fast dispatching system that allows any number of
|
||||
interested parties to subscribe to events, or "signals".
|
||||
|
||||
|
||||
## Pallets Community Ecosystem
|
||||
|
||||
> [!IMPORTANT]\
|
||||
> This project is part of the Pallets Community Ecosystem. Pallets is the open
|
||||
> source organization that maintains Flask; Pallets-Eco enables community
|
||||
> maintenance of related projects. If you are interested in helping maintain
|
||||
> this project, please reach out on [the Pallets Discord server][discord].
|
||||
>
|
||||
> [discord]: https://discord.gg/pallets
|
||||
|
||||
|
||||
## Example
|
||||
|
||||
Signal receivers can subscribe to specific senders or receive signals
|
||||
sent by any sender.
|
||||
|
||||
```pycon
|
||||
>>> from blinker import signal
|
||||
>>> started = signal('round-started')
|
||||
>>> def each(round):
|
||||
... print(f"Round {round}")
|
||||
...
|
||||
>>> started.connect(each)
|
||||
|
||||
>>> def round_two(round):
|
||||
... print("This is round two.")
|
||||
...
|
||||
>>> started.connect(round_two, sender=2)
|
||||
|
||||
>>> for round in range(1, 4):
|
||||
... started.send(round)
|
||||
...
|
||||
Round 1!
|
||||
Round 2!
|
||||
This is round two.
|
||||
Round 3!
|
||||
```
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
blinker-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
blinker-1.9.0.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054
|
||||
blinker-1.9.0.dist-info/METADATA,sha256=uIRiM8wjjbHkCtbCyTvctU37IAZk0kEe5kxAld1dvzA,1633
|
||||
blinker-1.9.0.dist-info/RECORD,,
|
||||
blinker-1.9.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
||||
blinker/__init__.py,sha256=I2EdZqpy4LyjX17Hn1yzJGWCjeLaVaPzsMgHkLfj_cQ,317
|
||||
blinker/__pycache__/__init__.cpython-311.pyc,,
|
||||
blinker/__pycache__/_utilities.cpython-311.pyc,,
|
||||
blinker/__pycache__/base.cpython-311.pyc,,
|
||||
blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675
|
||||
blinker/base.py,sha256=QpDuvXXcwJF49lUBcH5BiST46Rz9wSG7VW_p7N_027M,19132
|
||||
blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: flit 3.10.1
Root-Is-Purelib: true
Tag: py3-none-any
17
tapdown/lib/python3.11/site-packages/blinker/__init__.py
Normal file
@@ -0,0 +1,17 @@
from __future__ import annotations

from .base import ANY
from .base import default_namespace
from .base import NamedSignal
from .base import Namespace
from .base import Signal
from .base import signal

__all__ = [
    "ANY",
    "default_namespace",
    "NamedSignal",
    "Namespace",
    "Signal",
    "signal",
]
64
tapdown/lib/python3.11/site-packages/blinker/_utilities.py
Normal file
@@ -0,0 +1,64 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as c
|
||||
import inspect
|
||||
import typing as t
|
||||
from weakref import ref
|
||||
from weakref import WeakMethod
|
||||
|
||||
T = t.TypeVar("T")
|
||||
|
||||
|
||||
class Symbol:
|
||||
"""A constant symbol, nicer than ``object()``. Repeated calls return the
|
||||
same instance.
|
||||
|
||||
>>> Symbol('foo') is Symbol('foo')
|
||||
True
|
||||
>>> Symbol('foo')
|
||||
foo
|
||||
"""
|
||||
|
||||
symbols: t.ClassVar[dict[str, Symbol]] = {}
|
||||
|
||||
def __new__(cls, name: str) -> Symbol:
|
||||
if name in cls.symbols:
|
||||
return cls.symbols[name]
|
||||
|
||||
obj = super().__new__(cls)
|
||||
cls.symbols[name] = obj
|
||||
return obj
|
||||
|
||||
def __init__(self, name: str) -> None:
|
||||
self.name = name
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return self.name
|
||||
|
||||
def __getnewargs__(self) -> tuple[t.Any, ...]:
|
||||
return (self.name,)
|
||||
|
||||
|
||||
def make_id(obj: object) -> c.Hashable:
|
||||
"""Get a stable identifier for a receiver or sender, to be used as a dict
|
||||
key or in a set.
|
||||
"""
|
||||
if inspect.ismethod(obj):
|
||||
# The id of a bound method is not stable, but the id of the unbound
|
||||
# function and instance are.
|
||||
return id(obj.__func__), id(obj.__self__)
|
||||
|
||||
if isinstance(obj, (str, int)):
|
||||
# Instances with the same value always compare equal and have the same
|
||||
# hash, even if the id may change.
|
||||
return obj
|
||||
|
||||
# Assume other types are not hashable but will always be the same instance.
|
||||
return id(obj)
|
||||
|
||||
|
||||
def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]:
|
||||
if inspect.ismethod(obj):
|
||||
return WeakMethod(obj, callback) # type: ignore[arg-type, return-value]
|
||||
|
||||
return ref(obj, callback)
|
||||
512
tapdown/lib/python3.11/site-packages/blinker/base.py
Normal file
@@ -0,0 +1,512 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as c
|
||||
import sys
|
||||
import typing as t
|
||||
import weakref
|
||||
from collections import defaultdict
|
||||
from contextlib import contextmanager
|
||||
from functools import cached_property
|
||||
from inspect import iscoroutinefunction
|
||||
|
||||
from ._utilities import make_id
|
||||
from ._utilities import make_ref
|
||||
from ._utilities import Symbol
|
||||
|
||||
F = t.TypeVar("F", bound=c.Callable[..., t.Any])
|
||||
|
||||
ANY = Symbol("ANY")
|
||||
"""Symbol for "any sender"."""
|
||||
|
||||
ANY_ID = 0
|
||||
|
||||
|
||||
class Signal:
|
||||
"""A notification emitter.
|
||||
|
||||
:param doc: The docstring for the signal.
|
||||
"""
|
||||
|
||||
ANY = ANY
|
||||
"""An alias for the :data:`~blinker.ANY` sender symbol."""
|
||||
|
||||
set_class: type[set[t.Any]] = set
|
||||
"""The set class to use for tracking connected receivers and senders.
|
||||
Python's ``set`` is unordered. If receivers must be dispatched in the order
|
||||
they were connected, an ordered set implementation can be used.
|
||||
|
||||
.. versionadded:: 1.7
|
||||
"""
|
||||
|
||||
@cached_property
|
||||
def receiver_connected(self) -> Signal:
|
||||
"""Emitted at the end of each :meth:`connect` call.
|
||||
|
||||
The signal sender is the signal instance, and the :meth:`connect`
|
||||
arguments are passed through: ``receiver``, ``sender``, and ``weak``.
|
||||
|
||||
.. versionadded:: 1.2
|
||||
"""
|
||||
return Signal(doc="Emitted after a receiver connects.")
|
||||
|
||||
@cached_property
|
||||
def receiver_disconnected(self) -> Signal:
|
||||
"""Emitted at the end of each :meth:`disconnect` call.
|
||||
|
||||
The sender is the signal instance, and the :meth:`disconnect` arguments
|
||||
are passed through: ``receiver`` and ``sender``.
|
||||
|
||||
This signal is emitted **only** when :meth:`disconnect` is called
|
||||
explicitly. This signal cannot be emitted by an automatic disconnect
|
||||
when a weakly referenced receiver or sender goes out of scope, as the
|
||||
instance is no longer available to be used as the sender for this
|
||||
signal.
|
||||
|
||||
An alternative approach is available by subscribing to
|
||||
:attr:`receiver_connected` and setting up a custom weakref cleanup
|
||||
callback on weak receivers and senders.
|
||||
|
||||
.. versionadded:: 1.2
|
||||
"""
|
||||
return Signal(doc="Emitted after a receiver disconnects.")
|
||||
|
||||
def __init__(self, doc: str | None = None) -> None:
|
||||
if doc:
|
||||
self.__doc__ = doc
|
||||
|
||||
self.receivers: dict[
|
||||
t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any]
|
||||
] = {}
|
||||
"""The map of connected receivers. Useful to quickly check if any
|
||||
receivers are connected to the signal: ``if s.receivers:``. The
|
||||
structure and data is not part of the public API, but checking its
|
||||
boolean value is.
|
||||
"""
|
||||
|
||||
self.is_muted: bool = False
|
||||
self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
|
||||
self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
|
||||
self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {}
|
||||
|
||||
def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F:
|
||||
"""Connect ``receiver`` to be called when the signal is sent by
|
||||
``sender``.
|
||||
|
||||
:param receiver: The callable to call when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument
|
||||
along with any extra keyword arguments.
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender. A receiver may be connected
|
||||
to multiple senders by calling :meth:`connect` multiple times.
|
||||
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
|
||||
be automatically disconnected when it is garbage collected. When
|
||||
connecting a receiver defined within a function, set to ``False``,
|
||||
otherwise it will be disconnected when the function scope ends.
|
||||
"""
|
||||
receiver_id = make_id(receiver)
|
||||
sender_id = ANY_ID if sender is ANY else make_id(sender)
|
||||
|
||||
if weak:
|
||||
self.receivers[receiver_id] = make_ref(
|
||||
receiver, self._make_cleanup_receiver(receiver_id)
|
||||
)
|
||||
else:
|
||||
self.receivers[receiver_id] = receiver
|
||||
|
||||
self._by_sender[sender_id].add(receiver_id)
|
||||
self._by_receiver[receiver_id].add(sender_id)
|
||||
|
||||
if sender is not ANY and sender_id not in self._weak_senders:
|
||||
# store a cleanup for weakref-able senders
|
||||
try:
|
||||
self._weak_senders[sender_id] = make_ref(
|
||||
sender, self._make_cleanup_sender(sender_id)
|
||||
)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers:
|
||||
try:
|
||||
self.receiver_connected.send(
|
||||
self, receiver=receiver, sender=sender, weak=weak
|
||||
)
|
||||
except TypeError:
|
||||
# TODO no explanation or test for this
|
||||
self.disconnect(receiver, sender)
|
||||
raise
|
||||
|
||||
return receiver
|
||||
|
||||
def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]:
|
||||
"""Connect the decorated function to be called when the signal is sent
|
||||
by ``sender``.
|
||||
|
||||
The decorated function will be called when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument along
|
||||
with any extra keyword arguments.
|
||||
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender. A receiver may be connected
|
||||
to multiple senders by calling :meth:`connect` multiple times.
|
||||
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
|
||||
be automatically disconnected when it is garbage collected. When
|
||||
connecting a receiver defined within a function, set to ``False``,
|
||||
otherwise it will be disconnected when the function scope ends.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
"""
|
||||
|
||||
def decorator(fn: F) -> F:
|
||||
self.connect(fn, sender, weak)
|
||||
return fn
|
||||
|
||||
return decorator
|
||||
|
||||
@contextmanager
|
||||
def connected_to(
|
||||
self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
|
||||
) -> c.Generator[None, None, None]:
|
||||
"""A context manager that temporarily connects ``receiver`` to the
|
||||
signal while a ``with`` block executes. When the block exits, the
|
||||
receiver is disconnected. Useful for tests.
|
||||
|
||||
:param receiver: The callable to call when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument
|
||||
along with any extra keyword arguments.
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
"""
|
||||
self.connect(receiver, sender=sender, weak=False)
|
||||
|
||||
try:
|
||||
yield None
|
||||
finally:
|
||||
self.disconnect(receiver)
|
||||
|
||||
@contextmanager
|
||||
def muted(self) -> c.Generator[None, None, None]:
|
||||
"""A context manager that temporarily disables the signal. No receivers
|
||||
will be called if the signal is sent, until the ``with`` block exits.
|
||||
Useful for tests.
|
||||
"""
|
||||
self.is_muted = True
|
||||
|
||||
try:
|
||||
yield None
|
||||
finally:
|
||||
self.is_muted = False
|
||||
|
||||
def send(
|
||||
self,
|
||||
sender: t.Any | None = None,
|
||||
/,
|
||||
*,
|
||||
_async_wrapper: c.Callable[
|
||||
[c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any]
|
||||
]
|
||||
| None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
|
||||
"""Call all receivers that are connected to the given ``sender``
|
||||
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
|
||||
argument along with any extra keyword arguments. Return a list of
|
||||
``(receiver, return value)`` tuples.
|
||||
|
||||
The order receivers are called is undefined, but can be influenced by
|
||||
setting :attr:`set_class`.
|
||||
|
||||
If a receiver raises an exception, that exception will propagate up.
|
||||
This makes debugging straightforward, with an assumption that correctly
|
||||
implemented receivers will not raise.
|
||||
|
||||
:param sender: Call receivers connected to this sender, in addition to
|
||||
those connected to :data:`ANY`.
|
||||
:param _async_wrapper: Will be called on any receivers that are async
|
||||
coroutines to turn them into sync callables. For example, could run
|
||||
the receiver with an event loop.
|
||||
:param kwargs: Extra keyword arguments to pass to each receiver.
|
||||
|
||||
.. versionchanged:: 1.7
|
||||
Added the ``_async_wrapper`` argument.
|
||||
"""
|
||||
if self.is_muted:
|
||||
return []
|
||||
|
||||
results = []
|
||||
|
||||
for receiver in self.receivers_for(sender):
|
||||
if iscoroutinefunction(receiver):
|
||||
if _async_wrapper is None:
|
||||
raise RuntimeError("Cannot send to a coroutine function.")
|
||||
|
||||
result = _async_wrapper(receiver)(sender, **kwargs)
|
||||
else:
|
||||
result = receiver(sender, **kwargs)
|
||||
|
||||
results.append((receiver, result))
|
||||
|
||||
return results
|
||||
|
||||
async def send_async(
|
||||
self,
|
||||
sender: t.Any | None = None,
|
||||
/,
|
||||
*,
|
||||
_sync_wrapper: c.Callable[
|
||||
[c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]
|
||||
]
|
||||
| None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
|
||||
"""Await all receivers that are connected to the given ``sender``
|
||||
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
|
||||
argument along with any extra keyword arguments. Return a list of
|
||||
``(receiver, return value)`` tuples.
|
||||
|
||||
The order receivers are called is undefined, but can be influenced by
|
||||
setting :attr:`set_class`.
|
||||
|
||||
If a receiver raises an exception, that exception will propagate up.
|
||||
This makes debugging straightforward, with an assumption that correctly
|
||||
implemented receivers will not raise.
|
||||
|
||||
:param sender: Call receivers connected to this sender, in addition to
|
||||
those connected to :data:`ANY`.
|
||||
:param _sync_wrapper: Will be called on any receivers that are sync
|
||||
callables to turn them into async coroutines. For example,
|
||||
could call the receiver in a thread.
|
||||
:param kwargs: Extra keyword arguments to pass to each receiver.
|
||||
|
||||
.. versionadded:: 1.7
|
||||
"""
|
||||
if self.is_muted:
|
||||
return []
|
||||
|
||||
results = []
|
||||
|
||||
for receiver in self.receivers_for(sender):
|
||||
if not iscoroutinefunction(receiver):
|
||||
if _sync_wrapper is None:
|
||||
raise RuntimeError("Cannot send to a non-coroutine function.")
|
||||
|
||||
result = await _sync_wrapper(receiver)(sender, **kwargs)
|
||||
else:
|
||||
result = await receiver(sender, **kwargs)
|
||||
|
||||
results.append((receiver, result))
|
||||
|
||||
return results
|
||||
|
||||
def has_receivers_for(self, sender: t.Any) -> bool:
|
||||
"""Check if there is at least one receiver that will be called with the
|
||||
given ``sender``. A receiver connected to :data:`ANY` will always be
|
||||
called, regardless of sender. Does not check if weakly referenced
|
||||
receivers are still live. See :meth:`receivers_for` for a stronger
|
||||
search.
|
||||
|
||||
:param sender: Check for receivers connected to this sender, in addition
|
||||
to those connected to :data:`ANY`.
|
||||
"""
|
||||
if not self.receivers:
|
||||
return False
|
||||
|
||||
if self._by_sender[ANY_ID]:
|
||||
return True
|
||||
|
||||
if sender is ANY:
|
||||
return False
|
||||
|
||||
return make_id(sender) in self._by_sender
|
||||
|
||||
def receivers_for(
|
||||
self, sender: t.Any
|
||||
) -> c.Generator[c.Callable[..., t.Any], None, None]:
|
||||
"""Yield each receiver to be called for ``sender``, in addition to those
|
||||
to be called for :data:`ANY`. Weakly referenced receivers that are not
|
||||
live will be disconnected and skipped.
|
||||
|
||||
:param sender: Yield receivers connected to this sender, in addition
|
||||
to those connected to :data:`ANY`.
|
||||
"""
|
||||
# TODO: test receivers_for(ANY)
|
||||
if not self.receivers:
|
||||
return
|
||||
|
||||
sender_id = make_id(sender)
|
||||
|
||||
if sender_id in self._by_sender:
|
||||
ids = self._by_sender[ANY_ID] | self._by_sender[sender_id]
|
||||
else:
|
||||
ids = self._by_sender[ANY_ID].copy()
|
||||
|
||||
for receiver_id in ids:
|
||||
receiver = self.receivers.get(receiver_id)
|
||||
|
||||
if receiver is None:
|
||||
continue
|
||||
|
||||
if isinstance(receiver, weakref.ref):
|
||||
strong = receiver()
|
||||
|
||||
if strong is None:
|
||||
self._disconnect(receiver_id, ANY_ID)
|
||||
continue
|
||||
|
||||
yield strong
|
||||
else:
|
||||
yield receiver
|
||||
|
||||
def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None:
|
||||
"""Disconnect ``receiver`` from being called when the signal is sent by
|
||||
``sender``.
|
||||
|
||||
:param receiver: A connected receiver callable.
|
||||
:param sender: Disconnect from only this sender. By default, disconnect
|
||||
from all senders.
|
||||
"""
|
||||
sender_id: c.Hashable
|
||||
|
||||
if sender is ANY:
|
||||
sender_id = ANY_ID
|
||||
else:
|
||||
sender_id = make_id(sender)
|
||||
|
||||
receiver_id = make_id(receiver)
|
||||
self._disconnect(receiver_id, sender_id)
|
||||
|
||||
if (
|
||||
"receiver_disconnected" in self.__dict__
|
||||
and self.receiver_disconnected.receivers
|
||||
):
|
||||
self.receiver_disconnected.send(self, receiver=receiver, sender=sender)
|
||||
|
||||
def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None:
|
||||
if sender_id == ANY_ID:
|
||||
if self._by_receiver.pop(receiver_id, None) is not None:
|
||||
for bucket in self._by_sender.values():
|
||||
bucket.discard(receiver_id)
|
||||
|
||||
self.receivers.pop(receiver_id, None)
|
||||
else:
|
||||
self._by_sender[sender_id].discard(receiver_id)
|
||||
self._by_receiver[receiver_id].discard(sender_id)
|
||||
|
||||
def _make_cleanup_receiver(
|
||||
self, receiver_id: c.Hashable
|
||||
) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]:
|
||||
"""Create a callback function to disconnect a weakly referenced
|
||||
receiver when it is garbage collected.
|
||||
"""
|
||||
|
||||
def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None:
|
||||
# If the interpreter is shutting down, disconnecting can result in a
|
||||
# weird ignored exception. Don't call it in that case.
|
||||
if not sys.is_finalizing():
|
||||
self._disconnect(receiver_id, ANY_ID)
|
||||
|
||||
return cleanup
|
||||
|
||||
def _make_cleanup_sender(
|
||||
self, sender_id: c.Hashable
|
||||
) -> c.Callable[[weakref.ref[t.Any]], None]:
|
||||
"""Create a callback function to disconnect all receivers for a weakly
|
||||
referenced sender when it is garbage collected.
|
||||
"""
|
||||
assert sender_id != ANY_ID
|
||||
|
||||
def cleanup(ref: weakref.ref[t.Any]) -> None:
|
||||
self._weak_senders.pop(sender_id, None)
|
||||
|
||||
for receiver_id in self._by_sender.pop(sender_id, ()):
|
||||
self._by_receiver[receiver_id].discard(sender_id)
|
||||
|
||||
return cleanup
|
||||
|
||||
def _cleanup_bookkeeping(self) -> None:
|
||||
"""Prune unused sender/receiver bookkeeping. Not threadsafe.
|
||||
|
||||
Connecting & disconnecting leaves behind a small amount of bookkeeping
|
||||
data. Typical workloads using Blinker, for example in most web apps,
|
||||
Flask, CLI scripts, etc., are not adversely affected by this
|
||||
bookkeeping.
|
||||
|
||||
With a long-running process performing dynamic signal routing with high
|
||||
volume, e.g. connecting to function closures, senders are all unique
|
||||
object instances. Doing all of this over and over may cause memory usage
|
||||
to grow due to extraneous bookkeeping. (An empty ``set`` for each stale
|
||||
sender/receiver pair.)
|
||||
|
||||
This method will prune that bookkeeping away, with the caveat that such
|
||||
pruning is not threadsafe. The risk is that cleanup of a fully
|
||||
disconnected receiver/sender pair occurs while another thread is
|
||||
connecting that same pair. If you are in the highly dynamic, unique
|
||||
receiver/sender situation that has led you to this method, that failure
|
||||
mode is perhaps not a big deal for you.
|
||||
"""
|
||||
for mapping in (self._by_sender, self._by_receiver):
|
||||
for ident, bucket in list(mapping.items()):
|
||||
if not bucket:
|
||||
mapping.pop(ident, None)
|
||||
|
||||
def _clear_state(self) -> None:
|
||||
"""Disconnect all receivers and senders. Useful for tests."""
|
||||
self._weak_senders.clear()
|
||||
self.receivers.clear()
|
||||
self._by_sender.clear()
|
||||
self._by_receiver.clear()
|
||||
|
||||
|
||||
class NamedSignal(Signal):
|
||||
"""A named generic notification emitter. The name is not used by the signal
|
||||
itself, but matches the key in the :class:`Namespace` that it belongs to.
|
||||
|
||||
:param name: The name of the signal within the namespace.
|
||||
:param doc: The docstring for the signal.
|
||||
"""
|
||||
|
||||
def __init__(self, name: str, doc: str | None = None) -> None:
|
||||
super().__init__(doc)
|
||||
|
||||
#: The name of this signal.
|
||||
self.name: str = name
|
||||
|
||||
def __repr__(self) -> str:
|
||||
base = super().__repr__()
|
||||
return f"{base[:-1]}; {self.name!r}>" # noqa: E702
|
||||
|
||||
|
||||
class Namespace(dict[str, NamedSignal]):
|
||||
"""A dict mapping names to signals."""
|
||||
|
||||
def signal(self, name: str, doc: str | None = None) -> NamedSignal:
|
||||
"""Return the :class:`NamedSignal` for the given ``name``, creating it
|
||||
if required. Repeated calls with the same name return the same signal.
|
||||
|
||||
:param name: The name of the signal.
|
||||
:param doc: The docstring of the signal.
|
||||
"""
|
||||
if name not in self:
|
||||
self[name] = NamedSignal(name, doc)
|
||||
|
||||
return self[name]
|
||||
|
||||
|
||||
class _PNamespaceSignal(t.Protocol):
|
||||
def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ...
|
||||
|
||||
|
||||
default_namespace: Namespace = Namespace()
|
||||
"""A default :class:`Namespace` for creating named signals. :func:`signal`
|
||||
creates a :class:`NamedSignal` in this namespace.
|
||||
"""
|
||||
|
||||
signal: _PNamespaceSignal = default_namespace.signal
|
||||
"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given
|
||||
``name``, creating it if required. Repeated calls with the same name return the
|
||||
same signal.
|
||||
"""
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,84 @@
|
||||
Metadata-Version: 2.4
|
||||
Name: click
|
||||
Version: 8.3.0
|
||||
Summary: Composable command line interface toolkit
|
||||
Maintainer-email: Pallets <contact@palletsprojects.com>
|
||||
Requires-Python: >=3.10
|
||||
Description-Content-Type: text/markdown
|
||||
License-Expression: BSD-3-Clause
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Typing :: Typed
|
||||
License-File: LICENSE.txt
|
||||
Requires-Dist: colorama; platform_system == 'Windows'
|
||||
Project-URL: Changes, https://click.palletsprojects.com/page/changes/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Project-URL: Documentation, https://click.palletsprojects.com/
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Source, https://github.com/pallets/click/
|
||||
|
||||
<div align="center"><img src="https://raw.githubusercontent.com/pallets/click/refs/heads/stable/docs/_static/click-name.svg" alt="" height="150"></div>
|
||||
|
||||
# Click
|
||||
|
||||
Click is a Python package for creating beautiful command line interfaces
|
||||
in a composable way with as little code as necessary. It's the "Command
|
||||
Line Interface Creation Kit". It's highly configurable but comes with
|
||||
sensible defaults out of the box.
|
||||
|
||||
It aims to make the process of writing command line tools quick and fun
|
||||
while also preventing any frustration caused by the inability to
|
||||
implement an intended CLI API.
|
||||
|
||||
Click in three points:
|
||||
|
||||
- Arbitrary nesting of commands
|
||||
- Automatic help page generation
|
||||
- Supports lazy loading of subcommands at runtime
|
||||
|
||||
|
||||
## A Simple Example
|
||||
|
||||
```python
|
||||
import click
|
||||
|
||||
@click.command()
|
||||
@click.option("--count", default=1, help="Number of greetings.")
|
||||
@click.option("--name", prompt="Your name", help="The person to greet.")
|
||||
def hello(count, name):
|
||||
"""Simple program that greets NAME for a total of COUNT times."""
|
||||
for _ in range(count):
|
||||
click.echo(f"Hello, {name}!")
|
||||
|
||||
if __name__ == '__main__':
|
||||
hello()
|
||||
```
|
||||
|
||||
```
|
||||
$ python hello.py --count=3
|
||||
Your name: Click
|
||||
Hello, Click!
|
||||
Hello, Click!
|
||||
Hello, Click!
|
||||
```
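
Because the options and the docstring are declared on the command itself,
Click also generates the help page for free. Assuming the defaults shown
above, the output of `--help` looks roughly like this (a sketch, not
captured output):

```
$ python hello.py --help
Usage: hello.py [OPTIONS]

  Simple program that greets NAME for a total of COUNT times.

Options:
  --count INTEGER  Number of greetings.
  --name TEXT      The person to greet.
  --help           Show this message and exit.
```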
|
||||
|
||||
|
||||
## Donate
|
||||
|
||||
The Pallets organization develops and supports Click and other popular
|
||||
packages. In order to grow the community of contributors and users, and
|
||||
allow the maintainers to devote more time to the projects, [please
|
||||
donate today][].
|
||||
|
||||
[please donate today]: https://palletsprojects.com/donate
|
||||
|
||||
## Contributing
|
||||
|
||||
See our [detailed contributing documentation][contrib] for many ways to
|
||||
contribute, including reporting issues, requesting features, asking or answering
|
||||
questions, and making PRs.
|
||||
|
||||
[contrib]: https://palletsprojects.com/contributing/
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
click-8.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
click-8.3.0.dist-info/METADATA,sha256=P6vpEHZ_MLBt4SO2eB-QaadcOdiznkzaZtJImRo7_V4,2621
|
||||
click-8.3.0.dist-info/RECORD,,
|
||||
click-8.3.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
|
||||
click-8.3.0.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
|
||||
click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473
|
||||
click/__pycache__/__init__.cpython-311.pyc,,
|
||||
click/__pycache__/_compat.cpython-311.pyc,,
|
||||
click/__pycache__/_termui_impl.cpython-311.pyc,,
|
||||
click/__pycache__/_textwrap.cpython-311.pyc,,
|
||||
click/__pycache__/_utils.cpython-311.pyc,,
|
||||
click/__pycache__/_winconsole.cpython-311.pyc,,
|
||||
click/__pycache__/core.cpython-311.pyc,,
|
||||
click/__pycache__/decorators.cpython-311.pyc,,
|
||||
click/__pycache__/exceptions.cpython-311.pyc,,
|
||||
click/__pycache__/formatting.cpython-311.pyc,,
|
||||
click/__pycache__/globals.cpython-311.pyc,,
|
||||
click/__pycache__/parser.cpython-311.pyc,,
|
||||
click/__pycache__/shell_completion.cpython-311.pyc,,
|
||||
click/__pycache__/termui.cpython-311.pyc,,
|
||||
click/__pycache__/testing.cpython-311.pyc,,
|
||||
click/__pycache__/types.cpython-311.pyc,,
|
||||
click/__pycache__/utils.cpython-311.pyc,,
|
||||
click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693
|
||||
click/_termui_impl.py,sha256=ktpAHyJtNkhyR-x64CQFD6xJQI11fTA3qg2AV3iCToU,26799
|
||||
click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400
|
||||
click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943
|
||||
click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465
|
||||
click/core.py,sha256=1A5T8UoAXklIGPTJ83_DJbVi35ehtJS2FTkP_wQ7es0,128855
|
||||
click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461
|
||||
click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954
|
||||
click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730
|
||||
click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923
|
||||
click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010
|
||||
click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994
|
||||
click/termui.py,sha256=vAYrKC2a7f_NfEIhAThEVYfa__ib5XQbTSCGtJlABRA,30847
|
||||
click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102
|
||||
click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927
|
||||
click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257
|
||||
@@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: flit 3.12.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@@ -0,0 +1,28 @@
|
||||
Copyright 2014 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
123
tapdown/lib/python3.11/site-packages/click/__init__.py
Normal file
@@ -0,0 +1,123 @@
|
||||
"""
|
||||
Click is a simple Python module inspired by the stdlib optparse to make
|
||||
writing command line scripts fun. Unlike other modules, it's based
|
||||
around a simple API that does not come with too much magic and is
|
||||
composable.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .core import Argument as Argument
|
||||
from .core import Command as Command
|
||||
from .core import CommandCollection as CommandCollection
|
||||
from .core import Context as Context
|
||||
from .core import Group as Group
|
||||
from .core import Option as Option
|
||||
from .core import Parameter as Parameter
|
||||
from .decorators import argument as argument
|
||||
from .decorators import command as command
|
||||
from .decorators import confirmation_option as confirmation_option
|
||||
from .decorators import group as group
|
||||
from .decorators import help_option as help_option
|
||||
from .decorators import make_pass_decorator as make_pass_decorator
|
||||
from .decorators import option as option
|
||||
from .decorators import pass_context as pass_context
|
||||
from .decorators import pass_obj as pass_obj
|
||||
from .decorators import password_option as password_option
|
||||
from .decorators import version_option as version_option
|
||||
from .exceptions import Abort as Abort
|
||||
from .exceptions import BadArgumentUsage as BadArgumentUsage
|
||||
from .exceptions import BadOptionUsage as BadOptionUsage
|
||||
from .exceptions import BadParameter as BadParameter
|
||||
from .exceptions import ClickException as ClickException
|
||||
from .exceptions import FileError as FileError
|
||||
from .exceptions import MissingParameter as MissingParameter
|
||||
from .exceptions import NoSuchOption as NoSuchOption
|
||||
from .exceptions import UsageError as UsageError
|
||||
from .formatting import HelpFormatter as HelpFormatter
|
||||
from .formatting import wrap_text as wrap_text
|
||||
from .globals import get_current_context as get_current_context
|
||||
from .termui import clear as clear
|
||||
from .termui import confirm as confirm
|
||||
from .termui import echo_via_pager as echo_via_pager
|
||||
from .termui import edit as edit
|
||||
from .termui import getchar as getchar
|
||||
from .termui import launch as launch
|
||||
from .termui import pause as pause
|
||||
from .termui import progressbar as progressbar
|
||||
from .termui import prompt as prompt
|
||||
from .termui import secho as secho
|
||||
from .termui import style as style
|
||||
from .termui import unstyle as unstyle
|
||||
from .types import BOOL as BOOL
|
||||
from .types import Choice as Choice
|
||||
from .types import DateTime as DateTime
|
||||
from .types import File as File
|
||||
from .types import FLOAT as FLOAT
|
||||
from .types import FloatRange as FloatRange
|
||||
from .types import INT as INT
|
||||
from .types import IntRange as IntRange
|
||||
from .types import ParamType as ParamType
|
||||
from .types import Path as Path
|
||||
from .types import STRING as STRING
|
||||
from .types import Tuple as Tuple
|
||||
from .types import UNPROCESSED as UNPROCESSED
|
||||
from .types import UUID as UUID
|
||||
from .utils import echo as echo
|
||||
from .utils import format_filename as format_filename
|
||||
from .utils import get_app_dir as get_app_dir
|
||||
from .utils import get_binary_stream as get_binary_stream
|
||||
from .utils import get_text_stream as get_text_stream
|
||||
from .utils import open_file as open_file
|
||||
|
||||
|
||||
def __getattr__(name: str) -> object:
|
||||
import warnings
|
||||
|
||||
if name == "BaseCommand":
|
||||
from .core import _BaseCommand
|
||||
|
||||
warnings.warn(
|
||||
"'BaseCommand' is deprecated and will be removed in Click 9.0. Use"
|
||||
" 'Command' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _BaseCommand
|
||||
|
||||
if name == "MultiCommand":
|
||||
from .core import _MultiCommand
|
||||
|
||||
warnings.warn(
|
||||
"'MultiCommand' is deprecated and will be removed in Click 9.0. Use"
|
||||
" 'Group' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _MultiCommand
|
||||
|
||||
if name == "OptionParser":
|
||||
from .parser import _OptionParser
|
||||
|
||||
warnings.warn(
|
||||
"'OptionParser' is deprecated and will be removed in Click 9.0. The"
|
||||
" old parser is available in 'optparse'.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _OptionParser
|
||||
|
||||
if name == "__version__":
|
||||
import importlib.metadata
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"The '__version__' attribute is deprecated and will be removed in"
|
||||
" Click 9.1. Use feature detection or"
|
||||
" 'importlib.metadata.version(\"click\")' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return importlib.metadata.version("click")
|
||||
|
||||
raise AttributeError(name)
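

# Illustrative sketch (not part of upstream click): because of the module
# __getattr__ above, deprecated names still resolve but emit a
# DeprecationWarning. From application code this looks roughly like:
#
#   import warnings
#   import click
#
#   with warnings.catch_warnings(record=True) as caught:
#       warnings.simplefilter("always")
#       click.BaseCommand  # resolved through __getattr__ -> _BaseCommand
#
#   assert any(issubclass(w.category, DeprecationWarning) for w in caught)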
|
||||
622
tapdown/lib/python3.11/site-packages/click/_compat.py
Normal file
@@ -0,0 +1,622 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
import collections.abc as cabc
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import typing as t
|
||||
from types import TracebackType
|
||||
from weakref import WeakKeyDictionary
|
||||
|
||||
CYGWIN = sys.platform.startswith("cygwin")
|
||||
WIN = sys.platform.startswith("win")
|
||||
auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None
|
||||
_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
|
||||
|
||||
|
||||
def _make_text_stream(
|
||||
stream: t.BinaryIO,
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
) -> t.TextIO:
|
||||
if encoding is None:
|
||||
encoding = get_best_encoding(stream)
|
||||
if errors is None:
|
||||
errors = "replace"
|
||||
return _NonClosingTextIOWrapper(
|
||||
stream,
|
||||
encoding,
|
||||
errors,
|
||||
line_buffering=True,
|
||||
force_readable=force_readable,
|
||||
force_writable=force_writable,
|
||||
)
|
||||
|
||||
|
||||
def is_ascii_encoding(encoding: str) -> bool:
|
||||
"""Checks if a given encoding is ascii."""
|
||||
try:
|
||||
return codecs.lookup(encoding).name == "ascii"
|
||||
except LookupError:
|
||||
return False
|
||||
|
||||
|
||||
def get_best_encoding(stream: t.IO[t.Any]) -> str:
|
||||
"""Returns the default stream encoding if not found."""
|
||||
rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
|
||||
if is_ascii_encoding(rv):
|
||||
return "utf-8"
|
||||
return rv
|
||||
|
||||
|
||||
class _NonClosingTextIOWrapper(io.TextIOWrapper):
|
||||
def __init__(
|
||||
self,
|
||||
stream: t.BinaryIO,
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
**extra: t.Any,
|
||||
) -> None:
|
||||
self._stream = stream = t.cast(
|
||||
t.BinaryIO, _FixupStream(stream, force_readable, force_writable)
|
||||
)
|
||||
super().__init__(stream, encoding, errors, **extra)
|
||||
|
||||
def __del__(self) -> None:
|
||||
try:
|
||||
self.detach()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def isatty(self) -> bool:
|
||||
# https://bitbucket.org/pypy/pypy/issue/1803
|
||||
return self._stream.isatty()
|
||||
|
||||
|
||||
class _FixupStream:
|
||||
"""The new io interface needs more from streams than streams
|
||||
traditionally implement. As such, this fix-up code is necessary in
|
||||
some circumstances.
|
||||
|
||||
The forcing of readable and writable flags is there because some tools
put badly patched objects on sys (one such offender is certain versions
of jupyter notebook).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
stream: t.BinaryIO,
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
):
|
||||
self._stream = stream
|
||||
self._force_readable = force_readable
|
||||
self._force_writable = force_writable
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._stream, name)
|
||||
|
||||
def read1(self, size: int) -> bytes:
|
||||
f = getattr(self._stream, "read1", None)
|
||||
|
||||
if f is not None:
|
||||
return t.cast(bytes, f(size))
|
||||
|
||||
return self._stream.read(size)
|
||||
|
||||
def readable(self) -> bool:
|
||||
if self._force_readable:
|
||||
return True
|
||||
x = getattr(self._stream, "readable", None)
|
||||
if x is not None:
|
||||
return t.cast(bool, x())
|
||||
try:
|
||||
self._stream.read(0)
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
def writable(self) -> bool:
|
||||
if self._force_writable:
|
||||
return True
|
||||
x = getattr(self._stream, "writable", None)
|
||||
if x is not None:
|
||||
return t.cast(bool, x())
|
||||
try:
|
||||
self._stream.write(b"")
|
||||
except Exception:
|
||||
try:
|
||||
self._stream.write(b"")
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
def seekable(self) -> bool:
|
||||
x = getattr(self._stream, "seekable", None)
|
||||
if x is not None:
|
||||
return t.cast(bool, x())
|
||||
try:
|
||||
self._stream.seek(self._stream.tell())
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool:
|
||||
try:
|
||||
return isinstance(stream.read(0), bytes)
|
||||
except Exception:
|
||||
return default
|
||||
# This happens in some cases where the stream was already
|
||||
# closed. In this case, we assume the default.
|
||||
|
||||
|
||||
def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool:
|
||||
try:
|
||||
stream.write(b"")
|
||||
except Exception:
|
||||
try:
|
||||
stream.write("")
|
||||
return False
|
||||
except Exception:
|
||||
pass
|
||||
return default
|
||||
return True
|
||||
|
||||
|
||||
def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None:
|
||||
# We need to figure out if the given stream is already binary.
|
||||
# This can happen because the official docs recommend detaching
|
||||
# the streams to get binary streams. Some code might do this, so
|
||||
# we need to deal with this case explicitly.
|
||||
if _is_binary_reader(stream, False):
|
||||
return t.cast(t.BinaryIO, stream)
|
||||
|
||||
buf = getattr(stream, "buffer", None)
|
||||
|
||||
# Same situation here; this time we assume that the buffer is
|
||||
# actually binary in case it's closed.
|
||||
if buf is not None and _is_binary_reader(buf, True):
|
||||
return t.cast(t.BinaryIO, buf)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None:
|
||||
# We need to figure out if the given stream is already binary.
|
||||
# This can happen because the official docs recommend detaching
|
||||
# the streams to get binary streams. Some code might do this, so
|
||||
# we need to deal with this case explicitly.
|
||||
if _is_binary_writer(stream, False):
|
||||
return t.cast(t.BinaryIO, stream)
|
||||
|
||||
buf = getattr(stream, "buffer", None)
|
||||
|
||||
# Same situation here; this time we assume that the buffer is
|
||||
# actually binary in case it's closed.
|
||||
if buf is not None and _is_binary_writer(buf, True):
|
||||
return t.cast(t.BinaryIO, buf)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _stream_is_misconfigured(stream: t.TextIO) -> bool:
|
||||
"""A stream is misconfigured if its encoding is ASCII."""
|
||||
# If the stream does not have an encoding set, we assume it's set
|
||||
# to ASCII. This appears to happen in certain unittest
|
||||
# environments. It's not quite clear what the correct behavior is
|
||||
# but this at least will force Click to recover somehow.
|
||||
return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
|
||||
|
||||
|
||||
def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool:
|
||||
"""A stream attribute is compatible if it is equal to the
|
||||
desired value or the desired value is unset and the attribute
|
||||
has a value.
|
||||
"""
|
||||
stream_value = getattr(stream, attr, None)
|
||||
return stream_value == value or (value is None and stream_value is not None)
|
||||
|
||||
|
||||
def _is_compatible_text_stream(
|
||||
stream: t.TextIO, encoding: str | None, errors: str | None
|
||||
) -> bool:
|
||||
"""Check if a stream's encoding and errors attributes are
|
||||
compatible with the desired values.
|
||||
"""
|
||||
return _is_compat_stream_attr(
|
||||
stream, "encoding", encoding
|
||||
) and _is_compat_stream_attr(stream, "errors", errors)
|
||||
|
||||
|
||||
def _force_correct_text_stream(
|
||||
text_stream: t.IO[t.Any],
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
is_binary: t.Callable[[t.IO[t.Any], bool], bool],
|
||||
find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None],
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
) -> t.TextIO:
|
||||
if is_binary(text_stream, False):
|
||||
binary_reader = t.cast(t.BinaryIO, text_stream)
|
||||
else:
|
||||
text_stream = t.cast(t.TextIO, text_stream)
|
||||
# If the stream looks compatible, and won't default to a
|
||||
# misconfigured ascii encoding, return it as-is.
|
||||
if _is_compatible_text_stream(text_stream, encoding, errors) and not (
|
||||
encoding is None and _stream_is_misconfigured(text_stream)
|
||||
):
|
||||
return text_stream
|
||||
|
||||
# Otherwise, get the underlying binary reader.
|
||||
possible_binary_reader = find_binary(text_stream)
|
||||
|
||||
# If that's not possible, silently use the original reader
|
||||
# and get mojibake instead of exceptions.
|
||||
if possible_binary_reader is None:
|
||||
return text_stream
|
||||
|
||||
binary_reader = possible_binary_reader
|
||||
|
||||
# Default errors to replace instead of strict in order to get
|
||||
# something that works.
|
||||
if errors is None:
|
||||
errors = "replace"
|
||||
|
||||
# Wrap the binary stream in a text stream with the correct
|
||||
# encoding parameters.
|
||||
return _make_text_stream(
|
||||
binary_reader,
|
||||
encoding,
|
||||
errors,
|
||||
force_readable=force_readable,
|
||||
force_writable=force_writable,
|
||||
)
|
||||
|
||||
|
||||
def _force_correct_text_reader(
|
||||
text_reader: t.IO[t.Any],
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_readable: bool = False,
|
||||
) -> t.TextIO:
|
||||
return _force_correct_text_stream(
|
||||
text_reader,
|
||||
encoding,
|
||||
errors,
|
||||
_is_binary_reader,
|
||||
_find_binary_reader,
|
||||
force_readable=force_readable,
|
||||
)
|
||||
|
||||
|
||||
def _force_correct_text_writer(
|
||||
text_writer: t.IO[t.Any],
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_writable: bool = False,
|
||||
) -> t.TextIO:
|
||||
return _force_correct_text_stream(
|
||||
text_writer,
|
||||
encoding,
|
||||
errors,
|
||||
_is_binary_writer,
|
||||
_find_binary_writer,
|
||||
force_writable=force_writable,
|
||||
)
|
||||
|
||||
|
||||
def get_binary_stdin() -> t.BinaryIO:
|
||||
reader = _find_binary_reader(sys.stdin)
|
||||
if reader is None:
|
||||
raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
|
||||
return reader
|
||||
|
||||
|
||||
def get_binary_stdout() -> t.BinaryIO:
|
||||
writer = _find_binary_writer(sys.stdout)
|
||||
if writer is None:
|
||||
raise RuntimeError("Was not able to determine binary stream for sys.stdout.")
|
||||
return writer
|
||||
|
||||
|
||||
def get_binary_stderr() -> t.BinaryIO:
|
||||
writer = _find_binary_writer(sys.stderr)
|
||||
if writer is None:
|
||||
raise RuntimeError("Was not able to determine binary stream for sys.stderr.")
|
||||
return writer
|
||||
|
||||
|
||||
def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
|
||||
rv = _get_windows_console_stream(sys.stdin, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True)
|
||||
|
||||
|
||||
def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
|
||||
rv = _get_windows_console_stream(sys.stdout, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True)
|
||||
|
||||
|
||||
def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
|
||||
rv = _get_windows_console_stream(sys.stderr, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True)
|
||||
|
||||
|
||||
def _wrap_io_open(
|
||||
file: str | os.PathLike[str] | int,
|
||||
mode: str,
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
) -> t.IO[t.Any]:
|
||||
"""Handles not passing ``encoding`` and ``errors`` in binary mode."""
|
||||
if "b" in mode:
|
||||
return open(file, mode)
|
||||
|
||||
return open(file, mode, encoding=encoding, errors=errors)
|
||||
|
||||
|
||||
def open_stream(
|
||||
filename: str | os.PathLike[str],
|
||||
mode: str = "r",
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
atomic: bool = False,
|
||||
) -> tuple[t.IO[t.Any], bool]:
|
||||
binary = "b" in mode
|
||||
filename = os.fspath(filename)
|
||||
|
||||
# Standard streams first. These are simple because they ignore the
|
||||
# atomic flag. Use fsdecode to handle Path("-").
|
||||
if os.fsdecode(filename) == "-":
|
||||
if any(m in mode for m in ["w", "a", "x"]):
|
||||
if binary:
|
||||
return get_binary_stdout(), False
|
||||
return get_text_stdout(encoding=encoding, errors=errors), False
|
||||
if binary:
|
||||
return get_binary_stdin(), False
|
||||
return get_text_stdin(encoding=encoding, errors=errors), False
|
||||
|
||||
# Non-atomic writes directly go out through the regular open functions.
|
||||
if not atomic:
|
||||
return _wrap_io_open(filename, mode, encoding, errors), True
|
||||
|
||||
# Some usability stuff for atomic writes
|
||||
if "a" in mode:
|
||||
raise ValueError(
|
||||
"Appending to an existing file is not supported, because that"
|
||||
" would involve an expensive `copy`-operation to a temporary"
|
||||
" file. Open the file in normal `w`-mode and copy explicitly"
|
||||
" if that's what you're after."
|
||||
)
|
||||
if "x" in mode:
|
||||
raise ValueError("Use the `overwrite`-parameter instead.")
|
||||
if "w" not in mode:
|
||||
raise ValueError("Atomic writes only make sense with `w`-mode.")
|
||||
|
||||
# Atomic writes are more complicated. They work by opening a file
|
||||
# as a proxy in the same folder and then using the fdopen
|
||||
# functionality to wrap it in a Python file. Then we wrap it in an
|
||||
# atomic file that moves the file over on close.
|
||||
import errno
|
||||
import random
|
||||
|
||||
try:
|
||||
perm: int | None = os.stat(filename).st_mode
|
||||
except OSError:
|
||||
perm = None
|
||||
|
||||
flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
|
||||
|
||||
if binary:
|
||||
flags |= getattr(os, "O_BINARY", 0)
|
||||
|
||||
while True:
|
||||
tmp_filename = os.path.join(
|
||||
os.path.dirname(filename),
|
||||
f".__atomic-write{random.randrange(1 << 32):08x}",
|
||||
)
|
||||
try:
|
||||
fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
|
||||
break
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST or (
|
||||
os.name == "nt"
|
||||
and e.errno == errno.EACCES
|
||||
and os.path.isdir(e.filename)
|
||||
and os.access(e.filename, os.W_OK)
|
||||
):
|
||||
continue
|
||||
raise
|
||||
|
||||
if perm is not None:
|
||||
os.chmod(tmp_filename, perm) # in case perm includes bits in umask
|
||||
|
||||
f = _wrap_io_open(fd, mode, encoding, errors)
|
||||
af = _AtomicFile(f, tmp_filename, os.path.realpath(filename))
|
||||
return t.cast(t.IO[t.Any], af), True
|
||||
|
||||
|
||||
class _AtomicFile:
|
||||
def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None:
|
||||
self._f = f
|
||||
self._tmp_filename = tmp_filename
|
||||
self._real_filename = real_filename
|
||||
self.closed = False
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._real_filename
|
||||
|
||||
def close(self, delete: bool = False) -> None:
|
||||
if self.closed:
|
||||
return
|
||||
self._f.close()
|
||||
os.replace(self._tmp_filename, self._real_filename)
|
||||
self.closed = True
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._f, name)
|
||||
|
||||
def __enter__(self) -> _AtomicFile:
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.close(delete=exc_type is not None)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._f)
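

# Illustrative sketch (not part of upstream click): with atomic=True,
# open_stream() writes to a ".__atomic-write*" temporary file in the target
# directory and _AtomicFile.close() os.replace()s it over the destination,
# so readers never observe a half-written file. The filename is hypothetical.
#
#   f, should_close = open_stream("settings.json", "w", atomic=True)
#   try:
#       f.write('{"ok": true}')
#   finally:
#       if should_close:
#           f.close()  # performs the rename into place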
|
||||
|
||||
|
||||
def strip_ansi(value: str) -> str:
|
||||
return _ansi_re.sub("", value)
|
||||
|
||||
|
||||
def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool:
|
||||
while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
|
||||
stream = stream._stream
|
||||
|
||||
return stream.__class__.__module__.startswith("ipykernel.")
|
||||
|
||||
|
||||
def should_strip_ansi(
|
||||
stream: t.IO[t.Any] | None = None, color: bool | None = None
|
||||
) -> bool:
|
||||
if color is None:
|
||||
if stream is None:
|
||||
stream = sys.stdin
|
||||
return not isatty(stream) and not _is_jupyter_kernel_output(stream)
|
||||
return not color
|
||||
|
||||
|
||||
# On Windows, wrap the output streams with colorama to support ANSI
|
||||
# color codes.
|
||||
# NOTE: double check is needed so mypy does not analyze this on Linux
|
||||
if sys.platform.startswith("win") and WIN:
|
||||
from ._winconsole import _get_windows_console_stream
|
||||
|
||||
def _get_argv_encoding() -> str:
|
||||
import locale
|
||||
|
||||
return locale.getpreferredencoding()
|
||||
|
||||
_ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
|
||||
|
||||
def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO:
|
||||
"""Support ANSI color and style codes on Windows by wrapping a
|
||||
stream with colorama.
|
||||
"""
|
||||
try:
|
||||
cached = _ansi_stream_wrappers.get(stream)
|
||||
except Exception:
|
||||
cached = None
|
||||
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
import colorama
|
||||
|
||||
strip = should_strip_ansi(stream, color)
|
||||
ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
|
||||
rv = t.cast(t.TextIO, ansi_wrapper.stream)
|
||||
_write = rv.write
|
||||
|
||||
def _safe_write(s: str) -> int:
|
||||
try:
|
||||
return _write(s)
|
||||
except BaseException:
|
||||
ansi_wrapper.reset_all()
|
||||
raise
|
||||
|
||||
rv.write = _safe_write # type: ignore[method-assign]
|
||||
|
||||
try:
|
||||
_ansi_stream_wrappers[stream] = rv
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return rv
|
||||
|
||||
else:
|
||||
|
||||
def _get_argv_encoding() -> str:
|
||||
return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding()
|
||||
|
||||
def _get_windows_console_stream(
|
||||
f: t.TextIO, encoding: str | None, errors: str | None
|
||||
) -> t.TextIO | None:
|
||||
return None
|
||||
|
||||
|
||||
def term_len(x: str) -> int:
|
||||
return len(strip_ansi(x))
|
||||
|
||||
|
||||
def isatty(stream: t.IO[t.Any]) -> bool:
|
||||
try:
|
||||
return stream.isatty()
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def _make_cached_stream_func(
|
||||
src_func: t.Callable[[], t.TextIO | None],
|
||||
wrapper_func: t.Callable[[], t.TextIO],
|
||||
) -> t.Callable[[], t.TextIO | None]:
|
||||
cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
|
||||
|
||||
def func() -> t.TextIO | None:
|
||||
stream = src_func()
|
||||
|
||||
if stream is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
rv = cache.get(stream)
|
||||
except Exception:
|
||||
rv = None
|
||||
if rv is not None:
|
||||
return rv
|
||||
rv = wrapper_func()
|
||||
try:
|
||||
cache[stream] = rv
|
||||
except Exception:
|
||||
pass
|
||||
return rv
|
||||
|
||||
return func
|
||||
|
||||
|
||||
_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
|
||||
_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
|
||||
_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
|
||||
|
||||
|
||||
binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = {
|
||||
"stdin": get_binary_stdin,
|
||||
"stdout": get_binary_stdout,
|
||||
"stderr": get_binary_stderr,
|
||||
}
|
||||
|
||||
text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = {
|
||||
"stdin": get_text_stdin,
|
||||
"stdout": get_text_stdout,
|
||||
"stderr": get_text_stderr,
|
||||
}
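

# Illustrative sketch (not part of upstream click): these two tables back the
# public helpers click.get_binary_stream() and click.get_text_stream(), which
# are the supported way for application code to reach the rebuilt streams.
#
#   import click
#
#   out = click.get_binary_stream("stdout")
#   out.write(b"raw bytes, independent of the console encoding\n")
#
#   err = click.get_text_stream("stderr", encoding="utf-8")
#   err.write("text with an explicit encoding\n")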
|
||||
847
tapdown/lib/python3.11/site-packages/click/_termui_impl.py
Normal file
@@ -0,0 +1,847 @@
|
||||
"""
|
||||
This module contains implementations for the termui module. To keep the
|
||||
import time of Click down, some infrequently used functionality is
|
||||
placed in this module and only imported as needed.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import contextlib
|
||||
import math
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
import time
|
||||
import typing as t
|
||||
from gettext import gettext as _
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
from types import TracebackType
|
||||
|
||||
from ._compat import _default_text_stdout
|
||||
from ._compat import CYGWIN
|
||||
from ._compat import get_best_encoding
|
||||
from ._compat import isatty
|
||||
from ._compat import open_stream
|
||||
from ._compat import strip_ansi
|
||||
from ._compat import term_len
|
||||
from ._compat import WIN
|
||||
from .exceptions import ClickException
|
||||
from .utils import echo
|
||||
|
||||
V = t.TypeVar("V")
|
||||
|
||||
if os.name == "nt":
|
||||
BEFORE_BAR = "\r"
|
||||
AFTER_BAR = "\n"
|
||||
else:
|
||||
BEFORE_BAR = "\r\033[?25l"
|
||||
AFTER_BAR = "\033[?25h\n"
|
||||
|
||||
|
||||
class ProgressBar(t.Generic[V]):
|
||||
def __init__(
|
||||
self,
|
||||
iterable: cabc.Iterable[V] | None,
|
||||
length: int | None = None,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = " ",
|
||||
bar_template: str = "%(bar)s",
|
||||
info_sep: str = " ",
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
item_show_func: t.Callable[[V | None], str | None] | None = None,
|
||||
label: str | None = None,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
width: int = 30,
|
||||
) -> None:
|
||||
self.fill_char = fill_char
|
||||
self.empty_char = empty_char
|
||||
self.bar_template = bar_template
|
||||
self.info_sep = info_sep
|
||||
self.hidden = hidden
|
||||
self.show_eta = show_eta
|
||||
self.show_percent = show_percent
|
||||
self.show_pos = show_pos
|
||||
self.item_show_func = item_show_func
|
||||
self.label: str = label or ""
|
||||
|
||||
if file is None:
|
||||
file = _default_text_stdout()
|
||||
|
||||
# There are no standard streams attached to write to. For example,
|
||||
# pythonw on Windows.
|
||||
if file is None:
|
||||
file = StringIO()
|
||||
|
||||
self.file = file
|
||||
self.color = color
|
||||
self.update_min_steps = update_min_steps
|
||||
self._completed_intervals = 0
|
||||
self.width: int = width
|
||||
self.autowidth: bool = width == 0
|
||||
|
||||
if length is None:
|
||||
from operator import length_hint
|
||||
|
||||
length = length_hint(iterable, -1)
|
||||
|
||||
if length == -1:
|
||||
length = None
|
||||
if iterable is None:
|
||||
if length is None:
|
||||
raise TypeError("iterable or length is required")
|
||||
iterable = t.cast("cabc.Iterable[V]", range(length))
|
||||
self.iter: cabc.Iterable[V] = iter(iterable)
|
||||
self.length = length
|
||||
self.pos: int = 0
|
||||
self.avg: list[float] = []
|
||||
self.last_eta: float
|
||||
self.start: float
|
||||
self.start = self.last_eta = time.time()
|
||||
self.eta_known: bool = False
|
||||
self.finished: bool = False
|
||||
self.max_width: int | None = None
|
||||
self.entered: bool = False
|
||||
self.current_item: V | None = None
|
||||
self._is_atty = isatty(self.file)
|
||||
self._last_line: str | None = None
|
||||
|
||||
def __enter__(self) -> ProgressBar[V]:
|
||||
self.entered = True
|
||||
self.render_progress()
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.render_finish()
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[V]:
|
||||
if not self.entered:
|
||||
raise RuntimeError("You need to use progress bars in a with block.")
|
||||
self.render_progress()
|
||||
return self.generator()
|
||||
|
||||
def __next__(self) -> V:
|
||||
# Iteration is defined in terms of a generator function,
|
||||
# returned by iter(self); use that to define next(). This works
|
||||
# because `self.iter` is an iterable consumed by that generator,
|
||||
# so it is re-entry safe. Calling `next(self.generator())`
|
||||
# twice works and does "what you want".
|
||||
return next(iter(self))
|
||||
|
||||
def render_finish(self) -> None:
|
||||
if self.hidden or not self._is_atty:
|
||||
return
|
||||
self.file.write(AFTER_BAR)
|
||||
self.file.flush()
|
||||
|
||||
@property
|
||||
def pct(self) -> float:
|
||||
if self.finished:
|
||||
return 1.0
|
||||
return min(self.pos / (float(self.length or 1) or 1), 1.0)
|
||||
|
||||
@property
|
||||
def time_per_iteration(self) -> float:
|
||||
if not self.avg:
|
||||
return 0.0
|
||||
return sum(self.avg) / float(len(self.avg))
|
||||
|
||||
@property
|
||||
def eta(self) -> float:
|
||||
if self.length is not None and not self.finished:
|
||||
return self.time_per_iteration * (self.length - self.pos)
|
||||
return 0.0
|
||||
|
||||
def format_eta(self) -> str:
|
||||
if self.eta_known:
|
||||
t = int(self.eta)
|
||||
seconds = t % 60
|
||||
t //= 60
|
||||
minutes = t % 60
|
||||
t //= 60
|
||||
hours = t % 24
|
||||
t //= 24
|
||||
if t > 0:
|
||||
return f"{t}d {hours:02}:{minutes:02}:{seconds:02}"
|
||||
else:
|
||||
return f"{hours:02}:{minutes:02}:{seconds:02}"
|
||||
return ""
|
||||
|
||||
def format_pos(self) -> str:
|
||||
pos = str(self.pos)
|
||||
if self.length is not None:
|
||||
pos += f"/{self.length}"
|
||||
return pos
|
||||
|
||||
def format_pct(self) -> str:
|
||||
return f"{int(self.pct * 100): 4}%"[1:]
|
||||
|
||||
def format_bar(self) -> str:
|
||||
if self.length is not None:
|
||||
bar_length = int(self.pct * self.width)
|
||||
bar = self.fill_char * bar_length
|
||||
bar += self.empty_char * (self.width - bar_length)
|
||||
elif self.finished:
|
||||
bar = self.fill_char * self.width
|
||||
else:
|
||||
chars = list(self.empty_char * (self.width or 1))
|
||||
if self.time_per_iteration != 0:
|
||||
chars[
|
||||
int(
|
||||
(math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5)
|
||||
* self.width
|
||||
)
|
||||
] = self.fill_char
|
||||
bar = "".join(chars)
|
||||
return bar
|
||||
|
||||
def format_progress_line(self) -> str:
|
||||
show_percent = self.show_percent
|
||||
|
||||
info_bits = []
|
||||
if self.length is not None and show_percent is None:
|
||||
show_percent = not self.show_pos
|
||||
|
||||
if self.show_pos:
|
||||
info_bits.append(self.format_pos())
|
||||
if show_percent:
|
||||
info_bits.append(self.format_pct())
|
||||
if self.show_eta and self.eta_known and not self.finished:
|
||||
info_bits.append(self.format_eta())
|
||||
if self.item_show_func is not None:
|
||||
item_info = self.item_show_func(self.current_item)
|
||||
if item_info is not None:
|
||||
info_bits.append(item_info)
|
||||
|
||||
return (
|
||||
self.bar_template
|
||||
% {
|
||||
"label": self.label,
|
||||
"bar": self.format_bar(),
|
||||
"info": self.info_sep.join(info_bits),
|
||||
}
|
||||
).rstrip()
|
||||
|
||||
def render_progress(self) -> None:
|
||||
if self.hidden:
|
||||
return
|
||||
|
||||
if not self._is_atty:
|
||||
# Only output the label once if the output is not a TTY.
|
||||
if self._last_line != self.label:
|
||||
self._last_line = self.label
|
||||
echo(self.label, file=self.file, color=self.color)
|
||||
return
|
||||
|
||||
buf = []
|
||||
# Update width in case the terminal has been resized
|
||||
if self.autowidth:
|
||||
import shutil
|
||||
|
||||
old_width = self.width
|
||||
self.width = 0
|
||||
clutter_length = term_len(self.format_progress_line())
|
||||
new_width = max(0, shutil.get_terminal_size().columns - clutter_length)
|
||||
if new_width < old_width and self.max_width is not None:
|
||||
buf.append(BEFORE_BAR)
|
||||
buf.append(" " * self.max_width)
|
||||
self.max_width = new_width
|
||||
self.width = new_width
|
||||
|
||||
clear_width = self.width
|
||||
if self.max_width is not None:
|
||||
clear_width = self.max_width
|
||||
|
||||
buf.append(BEFORE_BAR)
|
||||
line = self.format_progress_line()
|
||||
line_len = term_len(line)
|
||||
if self.max_width is None or self.max_width < line_len:
|
||||
self.max_width = line_len
|
||||
|
||||
buf.append(line)
|
||||
buf.append(" " * (clear_width - line_len))
|
||||
line = "".join(buf)
|
||||
# Render the line only if it changed.
|
||||
|
||||
if line != self._last_line:
|
||||
self._last_line = line
|
||||
echo(line, file=self.file, color=self.color, nl=False)
|
||||
self.file.flush()
|
||||
|
||||
def make_step(self, n_steps: int) -> None:
|
||||
self.pos += n_steps
|
||||
if self.length is not None and self.pos >= self.length:
|
||||
self.finished = True
|
||||
|
||||
if (time.time() - self.last_eta) < 1.0:
|
||||
return
|
||||
|
||||
self.last_eta = time.time()
|
||||
|
||||
# self.avg is a rolling list of at most 7 samples, where each sample is
|
||||
# the total elapsed time divided by the current position, i.e. an
|
||||
# estimate of the average time per step.
|
||||
if self.pos:
|
||||
step = (time.time() - self.start) / self.pos
|
||||
else:
|
||||
step = time.time() - self.start
|
||||
|
||||
self.avg = self.avg[-6:] + [step]
|
||||
|
||||
self.eta_known = self.length is not None
|
||||
|
||||
def update(self, n_steps: int, current_item: V | None = None) -> None:
|
||||
"""Update the progress bar by advancing a specified number of
|
||||
steps, and optionally set the ``current_item`` for this new
|
||||
position.
|
||||
|
||||
:param n_steps: Number of steps to advance.
|
||||
:param current_item: Optional item to set as ``current_item``
|
||||
for the updated position.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added the ``current_item`` optional parameter.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Only render when the number of steps meets the
|
||||
``update_min_steps`` threshold.
|
||||
"""
|
||||
if current_item is not None:
|
||||
self.current_item = current_item
|
||||
|
||||
self._completed_intervals += n_steps
|
||||
|
||||
if self._completed_intervals >= self.update_min_steps:
|
||||
self.make_step(self._completed_intervals)
|
||||
self.render_progress()
|
||||
self._completed_intervals = 0
|
||||
|
||||
def finish(self) -> None:
|
||||
self.eta_known = False
|
||||
self.current_item = None
|
||||
self.finished = True
|
||||
|
||||
def generator(self) -> cabc.Iterator[V]:
|
||||
"""Return a generator which yields the items added to the bar
|
||||
during construction, and updates the progress bar *after* the
|
||||
yielded block returns.
|
||||
"""
|
||||
# WARNING: the iterator interface for `ProgressBar` relies on
|
||||
# this and only works because this is a simple generator which
|
||||
# doesn't create or manage additional state. If this function
|
||||
# changes, the impact should be evaluated both against
|
||||
# `iter(bar)` and `next(bar)`. `next()` in particular may call
|
||||
# `self.generator()` repeatedly, and this must remain safe in
|
||||
# order for that interface to work.
|
||||
if not self.entered:
|
||||
raise RuntimeError("You need to use progress bars in a with block.")
|
||||
|
||||
if not self._is_atty:
|
||||
yield from self.iter
|
||||
else:
|
||||
for rv in self.iter:
|
||||
self.current_item = rv
|
||||
|
||||
# This allows show_item_func to be updated before the
|
||||
# item is processed. Only trigger at the beginning of
|
||||
# the update interval.
|
||||
if self._completed_intervals == 0:
|
||||
self.render_progress()
|
||||
|
||||
yield rv
|
||||
self.update(1)
|
||||
|
||||
self.finish()
|
||||
self.render_progress()
|
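# Illustrative sketch (not part of the vendored click source): the ProgressBar
# class above is normally driven through the public ``click.progressbar``
# helper. The iterable, label, and sleep interval below are made-up
# placeholders.
def _example_progressbar_usage() -> None:
    import time

    import click

    with click.progressbar(range(100), label="Processing items") as bar:
        for _ in bar:
            time.sleep(0.01)  # each iteration advances the bar by one step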
||||
|
||||
|
||||
def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None:
|
||||
"""Decide what method to use for paging through text."""
|
||||
stdout = _default_text_stdout()
|
||||
|
||||
# There are no standard streams attached to write to. For example,
|
||||
# pythonw on Windows.
|
||||
if stdout is None:
|
||||
stdout = StringIO()
|
||||
|
||||
if not isatty(sys.stdin) or not isatty(stdout):
|
||||
return _nullpager(stdout, generator, color)
|
||||
|
||||
# Split and normalize the pager command into parts.
|
||||
pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False)
|
||||
if pager_cmd_parts:
|
||||
if WIN:
|
||||
if _tempfilepager(generator, pager_cmd_parts, color):
|
||||
return
|
||||
elif _pipepager(generator, pager_cmd_parts, color):
|
||||
return
|
||||
|
||||
if os.environ.get("TERM") in ("dumb", "emacs"):
|
||||
return _nullpager(stdout, generator, color)
|
||||
if (WIN or sys.platform.startswith("os2")) and _tempfilepager(
|
||||
generator, ["more"], color
|
||||
):
|
||||
return
|
||||
if _pipepager(generator, ["less"], color):
|
||||
return
|
||||
|
||||
import tempfile
|
||||
|
||||
fd, filename = tempfile.mkstemp()
|
||||
os.close(fd)
|
||||
try:
|
||||
if _pipepager(generator, ["more"], color):
|
||||
return
|
||||
return _nullpager(stdout, generator, color)
|
||||
finally:
|
||||
os.unlink(filename)
|
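# Illustrative sketch (not part of the vendored click source): ``pager()``
# above is reached through the public ``click.echo_via_pager`` helper. The
# generated lines below are placeholder content.
def _example_pager_usage() -> None:
    import click

    click.echo_via_pager(f"line {n}\n" for n in range(500))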
||||
|
||||
|
||||
def _pipepager(
|
||||
generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None
|
||||
) -> bool:
|
||||
"""Page through text by feeding it to another program. Invoking a
|
||||
pager through this might support colors.
|
||||
|
||||
Returns `True` if the command was found, `False` otherwise and thus another
|
||||
pager should be attempted.
|
||||
"""
|
||||
# Split the command into the invoked CLI and its parameters.
|
||||
if not cmd_parts:
|
||||
return False
|
||||
|
||||
import shutil
|
||||
|
||||
cmd = cmd_parts[0]
|
||||
cmd_params = cmd_parts[1:]
|
||||
|
||||
cmd_filepath = shutil.which(cmd)
|
||||
if not cmd_filepath:
|
||||
return False
|
||||
# Resolves symlinks and produces a normalized absolute path string.
|
||||
cmd_path = Path(cmd_filepath).resolve()
|
||||
cmd_name = cmd_path.name
|
||||
|
||||
import subprocess
|
||||
|
||||
# Make a local copy of the environment to not affect the global one.
|
||||
env = dict(os.environ)
|
||||
|
||||
# If we're piping to less and the user hasn't decided on colors, we enable
|
||||
# them by default or when we find the -R flag in the command line arguments.
|
||||
if color is None and cmd_name == "less":
|
||||
less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}"
|
||||
if not less_flags:
|
||||
env["LESS"] = "-R"
|
||||
color = True
|
||||
elif "r" in less_flags or "R" in less_flags:
|
||||
color = True
|
||||
|
||||
c = subprocess.Popen(
|
||||
[str(cmd_path)] + cmd_params,
|
||||
shell=True,
|
||||
stdin=subprocess.PIPE,
|
||||
env=env,
|
||||
errors="replace",
|
||||
text=True,
|
||||
)
|
||||
assert c.stdin is not None
|
||||
try:
|
||||
for text in generator:
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
|
||||
c.stdin.write(text)
|
||||
except BrokenPipeError:
|
||||
# In case the pager exited unexpectedly, ignore the broken pipe error.
|
||||
pass
|
||||
except Exception as e:
|
||||
# In case there is an exception we want to close the pager immediately
|
||||
# and let the caller handle it.
|
||||
# Otherwise the pager will keep running, and the user may not notice
|
||||
# the error message, or worse yet it may leave the terminal in a broken state.
|
||||
c.terminate()
|
||||
raise e
|
||||
finally:
|
||||
# We must close stdin and wait for the pager to exit before we continue
|
||||
try:
|
||||
c.stdin.close()
|
||||
# Close implies flush, so it might throw a BrokenPipeError if the pager
|
||||
# process exited already.
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
|
||||
# Less doesn't respect ^C, but catches it for its own UI purposes (aborting
|
||||
# search or other commands inside less).
|
||||
#
|
||||
# That means when the user hits ^C, the parent process (click) terminates,
|
||||
# but less is still alive, paging the output and messing up the terminal.
|
||||
#
|
||||
# If the user wants to make the pager exit on ^C, they should set
|
||||
# `LESS='-K'`. It's not our decision to make.
|
||||
while True:
|
||||
try:
|
||||
c.wait()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
else:
|
||||
break
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _tempfilepager(
|
||||
generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None
|
||||
) -> bool:
|
||||
"""Page through text by invoking a program on a temporary file.
|
||||
|
||||
Returns `True` if the command was found, `False` otherwise and thus another
|
||||
pager should be attempted.
|
||||
"""
|
||||
# Split the command into the invoked CLI and its parameters.
|
||||
if not cmd_parts:
|
||||
return False
|
||||
|
||||
import shutil
|
||||
|
||||
cmd = cmd_parts[0]
|
||||
|
||||
cmd_filepath = shutil.which(cmd)
|
||||
if not cmd_filepath:
|
||||
return False
|
||||
# Resolves symlinks and produces a normalized absolute path string.
|
||||
cmd_path = Path(cmd_filepath).resolve()
|
||||
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
fd, filename = tempfile.mkstemp()
|
||||
# TODO: This never terminates if the passed generator never terminates.
|
||||
text = "".join(generator)
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
encoding = get_best_encoding(sys.stdout)
|
||||
with open_stream(filename, "wb")[0] as f:
|
||||
f.write(text.encode(encoding))
|
||||
try:
|
||||
subprocess.call([str(cmd_path), filename])
|
||||
except OSError:
|
||||
# Command not found
|
||||
pass
|
||||
finally:
|
||||
os.close(fd)
|
||||
os.unlink(filename)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _nullpager(
|
||||
stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None
|
||||
) -> None:
|
||||
"""Simply print unformatted text. This is the ultimate fallback."""
|
||||
for text in generator:
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
stream.write(text)
|
||||
|
||||
|
||||
class Editor:
|
||||
def __init__(
|
||||
self,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
) -> None:
|
||||
self.editor = editor
|
||||
self.env = env
|
||||
self.require_save = require_save
|
||||
self.extension = extension
|
||||
|
||||
def get_editor(self) -> str:
|
||||
if self.editor is not None:
|
||||
return self.editor
|
||||
for key in "VISUAL", "EDITOR":
|
||||
rv = os.environ.get(key)
|
||||
if rv:
|
||||
return rv
|
||||
if WIN:
|
||||
return "notepad"
|
||||
|
||||
from shutil import which
|
||||
|
||||
for editor in "sensible-editor", "vim", "nano":
|
||||
if which(editor) is not None:
|
||||
return editor
|
||||
return "vi"
|
||||
|
||||
def edit_files(self, filenames: cabc.Iterable[str]) -> None:
|
||||
import subprocess
|
||||
|
||||
editor = self.get_editor()
|
||||
environ: dict[str, str] | None = None
|
||||
|
||||
if self.env:
|
||||
environ = os.environ.copy()
|
||||
environ.update(self.env)
|
||||
|
||||
exc_filename = " ".join(f'"{filename}"' for filename in filenames)
|
||||
|
||||
try:
|
||||
c = subprocess.Popen(
|
||||
args=f"{editor} {exc_filename}", env=environ, shell=True
|
||||
)
|
||||
exit_code = c.wait()
|
||||
if exit_code != 0:
|
||||
raise ClickException(
|
||||
_("{editor}: Editing failed").format(editor=editor)
|
||||
)
|
||||
except OSError as e:
|
||||
raise ClickException(
|
||||
_("{editor}: Editing failed: {e}").format(editor=editor, e=e)
|
||||
) from e
|
||||
|
||||
@t.overload
|
||||
def edit(self, text: bytes | bytearray) -> bytes | None: ...
|
||||
|
||||
# We cannot know whether or not the type expected is str or bytes when None
|
||||
# is passed, so str is returned as that was what was done before.
|
||||
@t.overload
|
||||
def edit(self, text: str | None) -> str | None: ...
|
||||
|
||||
def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None:
|
||||
import tempfile
|
||||
|
||||
if text is None:
|
||||
data: bytes | bytearray = b""
|
||||
elif isinstance(text, (bytes, bytearray)):
|
||||
data = text
|
||||
else:
|
||||
if text and not text.endswith("\n"):
|
||||
text += "\n"
|
||||
|
||||
if WIN:
|
||||
data = text.replace("\n", "\r\n").encode("utf-8-sig")
|
||||
else:
|
||||
data = text.encode("utf-8")
|
||||
|
||||
fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension)
|
||||
f: t.BinaryIO
|
||||
|
||||
try:
|
||||
with os.fdopen(fd, "wb") as f:
|
||||
f.write(data)
|
||||
|
||||
# If the filesystem resolution is 1 second, like Mac OS
|
||||
# 10.12 Extended, or 2 seconds, like FAT32, and the editor
|
||||
# closes very fast, require_save can fail. Set the modified
|
||||
# time to be 2 seconds in the past to work around this.
|
||||
os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2))
|
||||
# Depending on the resolution, the exact value might not be
|
||||
# recorded, so get the new recorded value.
|
||||
timestamp = os.path.getmtime(name)
|
||||
|
||||
self.edit_files((name,))
|
||||
|
||||
if self.require_save and os.path.getmtime(name) == timestamp:
|
||||
return None
|
||||
|
||||
with open(name, "rb") as f:
|
||||
rv = f.read()
|
||||
|
||||
if isinstance(text, (bytes, bytearray)):
|
||||
return rv
|
||||
|
||||
return rv.decode("utf-8-sig").replace("\r\n", "\n")
|
||||
finally:
|
||||
os.unlink(name)
|
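# Illustrative sketch (not part of the vendored click source): the Editor
# class above backs the public ``click.edit`` helper. The placeholder text is
# made up; ``click.edit`` returns ``None`` when the file is closed unsaved.
def _example_edit_usage() -> None:
    import click

    edited = click.edit("# Replace this placeholder text, then save and exit.\n")
    if edited is not None:
        click.echo(edited)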
||||
|
||||
|
||||
def open_url(url: str, wait: bool = False, locate: bool = False) -> int:
|
||||
import subprocess
|
||||
|
||||
def _unquote_file(url: str) -> str:
|
||||
from urllib.parse import unquote
|
||||
|
||||
if url.startswith("file://"):
|
||||
url = unquote(url[7:])
|
||||
|
||||
return url
|
||||
|
||||
if sys.platform == "darwin":
|
||||
args = ["open"]
|
||||
if wait:
|
||||
args.append("-W")
|
||||
if locate:
|
||||
args.append("-R")
|
||||
args.append(_unquote_file(url))
|
||||
null = open("/dev/null", "w")
|
||||
try:
|
||||
return subprocess.Popen(args, stderr=null).wait()
|
||||
finally:
|
||||
null.close()
|
||||
elif WIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = ["explorer", f"/select,{url}"]
|
||||
else:
|
||||
args = ["start"]
|
||||
if wait:
|
||||
args.append("/WAIT")
|
||||
args.append("")
|
||||
args.append(url)
|
||||
try:
|
||||
return subprocess.call(args)
|
||||
except OSError:
|
||||
# Command not found
|
||||
return 127
|
||||
elif CYGWIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = ["cygstart", os.path.dirname(url)]
|
||||
else:
|
||||
args = ["cygstart"]
|
||||
if wait:
|
||||
args.append("-w")
|
||||
args.append(url)
|
||||
try:
|
||||
return subprocess.call(args)
|
||||
except OSError:
|
||||
# Command not found
|
||||
return 127
|
||||
|
||||
try:
|
||||
if locate:
|
||||
url = os.path.dirname(_unquote_file(url)) or "."
|
||||
else:
|
||||
url = _unquote_file(url)
|
||||
c = subprocess.Popen(["xdg-open", url])
|
||||
if wait:
|
||||
return c.wait()
|
||||
return 0
|
||||
except OSError:
|
||||
if url.startswith(("http://", "https://")) and not locate and not wait:
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open(url)
|
||||
return 0
|
||||
return 1
|
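# Illustrative sketch (not part of the vendored click source): ``open_url``
# above is exposed publicly as ``click.launch``. The URL and file path are
# placeholder values.
def _example_launch_usage() -> None:
    import click

    click.launch("https://example.com")            # open in the default browser
    click.launch("/tmp/report.txt", locate=True)   # reveal the file in a file manager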
||||
|
||||
|
||||
def _translate_ch_to_exc(ch: str) -> None:
|
||||
if ch == "\x03":
|
||||
raise KeyboardInterrupt()
|
||||
|
||||
if ch == "\x04" and not WIN: # Unix-like, Ctrl+D
|
||||
raise EOFError()
|
||||
|
||||
if ch == "\x1a" and WIN: # Windows, Ctrl+Z
|
||||
raise EOFError()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
if sys.platform == "win32":
|
||||
import msvcrt
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal() -> cabc.Iterator[int]:
|
||||
yield -1
|
||||
|
||||
def getchar(echo: bool) -> str:
|
||||
# The function `getch` will return a bytes object corresponding to
|
||||
# the pressed character. Since Windows 10 build 1803, it will also
|
||||
# return \x00 when called a second time after pressing a regular key.
|
||||
#
|
||||
# `getwch` does not share this probably-bugged behavior. Moreover, it
|
||||
# returns a Unicode object by default, which is what we want.
|
||||
#
|
||||
# Either of these functions will return \x00 or \xe0 to indicate
|
||||
# a special key, and you need to call the same function again to get
|
||||
# the "rest" of the code. The fun part is that \u00e0 is
|
||||
# "latin small letter a with grave", so if you type that on a French
|
||||
# keyboard, you _also_ get a \xe0.
|
||||
# E.g., consider the Up arrow. This returns \xe0 and then \x48. The
|
||||
# resulting Unicode string reads as "a with grave" + "capital H".
|
||||
# This is indistinguishable from when the user actually types
|
||||
# "a with grave" and then "capital H".
|
||||
#
|
||||
# When \xe0 is returned, we assume it's part of a special-key sequence
|
||||
# and call `getwch` again, but that means that when the user types
|
||||
# the \u00e0 character, `getchar` doesn't return until a second
|
||||
# character is typed.
|
||||
# The alternative is returning immediately, but that would mess up
|
||||
# cross-platform handling of arrow keys and others that start with
|
||||
# \xe0. Another option is using `getch`, but then we can't reliably
|
||||
# read non-ASCII characters, because return values of `getch` are
|
||||
# limited to the current 8-bit codepage.
|
||||
#
|
||||
# Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
|
||||
# is doing the right thing in more situations than with `getch`.
|
||||
|
||||
if echo:
|
||||
func = t.cast(t.Callable[[], str], msvcrt.getwche)
|
||||
else:
|
||||
func = t.cast(t.Callable[[], str], msvcrt.getwch)
|
||||
|
||||
rv = func()
|
||||
|
||||
if rv in ("\x00", "\xe0"):
|
||||
# \x00 and \xe0 are control characters that indicate special key,
|
||||
# see above.
|
||||
rv += func()
|
||||
|
||||
_translate_ch_to_exc(rv)
|
||||
return rv
|
||||
|
||||
else:
|
||||
import termios
|
||||
import tty
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal() -> cabc.Iterator[int]:
|
||||
f: t.TextIO | None
|
||||
fd: int
|
||||
|
||||
if not isatty(sys.stdin):
|
||||
f = open("/dev/tty")
|
||||
fd = f.fileno()
|
||||
else:
|
||||
fd = sys.stdin.fileno()
|
||||
f = None
|
||||
|
||||
try:
|
||||
old_settings = termios.tcgetattr(fd)
|
||||
|
||||
try:
|
||||
tty.setraw(fd)
|
||||
yield fd
|
||||
finally:
|
||||
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
|
||||
sys.stdout.flush()
|
||||
|
||||
if f is not None:
|
||||
f.close()
|
||||
except termios.error:
|
||||
pass
|
||||
|
||||
def getchar(echo: bool) -> str:
|
||||
with raw_terminal() as fd:
|
||||
ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace")
|
||||
|
||||
if echo and isatty(sys.stdout):
|
||||
sys.stdout.write(ch)
|
||||
|
||||
_translate_ch_to_exc(ch)
|
||||
return ch
|
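# Illustrative sketch (not part of the vendored click source): either
# platform-specific ``getchar`` defined above is exposed as ``click.getchar``,
# which reads a single keypress without waiting for Enter.
def _example_getchar_usage() -> None:
    import click

    click.echo("Press any key to continue...")
    key = click.getchar()
    click.echo(f"You pressed: {key!r}")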
||||
51
tapdown/lib/python3.11/site-packages/click/_textwrap.py
Normal file
@@ -0,0 +1,51 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import textwrap
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
||||
class TextWrapper(textwrap.TextWrapper):
|
||||
def _handle_long_word(
|
||||
self,
|
||||
reversed_chunks: list[str],
|
||||
cur_line: list[str],
|
||||
cur_len: int,
|
||||
width: int,
|
||||
) -> None:
|
||||
space_left = max(width - cur_len, 1)
|
||||
|
||||
if self.break_long_words:
|
||||
last = reversed_chunks[-1]
|
||||
cut = last[:space_left]
|
||||
res = last[space_left:]
|
||||
cur_line.append(cut)
|
||||
reversed_chunks[-1] = res
|
||||
elif not cur_line:
|
||||
cur_line.append(reversed_chunks.pop())
|
||||
|
||||
@contextmanager
|
||||
def extra_indent(self, indent: str) -> cabc.Iterator[None]:
|
||||
old_initial_indent = self.initial_indent
|
||||
old_subsequent_indent = self.subsequent_indent
|
||||
self.initial_indent += indent
|
||||
self.subsequent_indent += indent
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.initial_indent = old_initial_indent
|
||||
self.subsequent_indent = old_subsequent_indent
|
||||
|
||||
def indent_only(self, text: str) -> str:
|
||||
rv = []
|
||||
|
||||
for idx, line in enumerate(text.splitlines()):
|
||||
indent = self.initial_indent
|
||||
|
||||
if idx > 0:
|
||||
indent = self.subsequent_indent
|
||||
|
||||
rv.append(f"{indent}{line}")
|
||||
|
||||
return "\n".join(rv)
|
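# Illustrative sketch (not part of the vendored click source): how the
# ``extra_indent`` context manager defined above can be used. The indent
# string and sample text are placeholders.
def _example_extra_indent_usage() -> None:
    wrapper = TextWrapper(width=40)

    with wrapper.extra_indent("    "):
        # Both initial and subsequent indents carry the extra four spaces,
        # but only for the duration of this block.
        print(wrapper.fill("This sample paragraph is wrapped with the extra indent applied."))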
||||
36
tapdown/lib/python3.11/site-packages/click/_utils.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import enum
|
||||
import typing as t
|
||||
|
||||
|
||||
class Sentinel(enum.Enum):
|
||||
"""Enum used to define sentinel values.
|
||||
|
||||
.. seealso::
|
||||
|
||||
`PEP 661 - Sentinel Values <https://peps.python.org/pep-0661/>`_.
|
||||
"""
|
||||
|
||||
UNSET = object()
|
||||
FLAG_NEEDS_VALUE = object()
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"{self.__class__.__name__}.{self.name}"
|
||||
|
||||
|
||||
UNSET = Sentinel.UNSET
|
||||
"""Sentinel used to indicate that a value is not set."""
|
||||
|
||||
FLAG_NEEDS_VALUE = Sentinel.FLAG_NEEDS_VALUE
|
||||
"""Sentinel used to indicate an option was passed as a flag without a
|
||||
value but is not a flag option.
|
||||
|
||||
``Option.consume_value`` uses this to prompt or use the ``flag_value``.
|
||||
"""
|
||||
|
||||
T_UNSET = t.Literal[UNSET] # type: ignore[valid-type]
|
||||
"""Type hint for the :data:`UNSET` sentinel value."""
|
||||
|
||||
T_FLAG_NEEDS_VALUE = t.Literal[FLAG_NEEDS_VALUE] # type: ignore[valid-type]
|
||||
"""Type hint for the :data:`FLAG_NEEDS_VALUE` sentinel value."""
|
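# Illustrative sketch (not part of the vendored click source): a sentinel such
# as UNSET distinguishes "argument omitted" from an explicit ``None``. The
# function name and return values are placeholders.
def _example_unset_usage(value: t.Any = UNSET) -> t.Any:
    if value is UNSET:
        return "caller did not supply a value"
    return value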
||||
296
tapdown/lib/python3.11/site-packages/click/_winconsole.py
Normal file
@@ -0,0 +1,296 @@
|
||||
# This module is based on the excellent work by Adam Bartoš who
|
||||
# provided a lot of what went into the implementation here in
|
||||
# the discussion to issue1602 in the Python bug tracker.
|
||||
#
|
||||
# There are some general differences in regards to how this works
|
||||
# compared to the original patches as we do not need to patch
|
||||
# the entire interpreter but just work in our little world of
|
||||
# echo and prompt.
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import io
|
||||
import sys
|
||||
import time
|
||||
import typing as t
|
||||
from ctypes import Array
|
||||
from ctypes import byref
|
||||
from ctypes import c_char
|
||||
from ctypes import c_char_p
|
||||
from ctypes import c_int
|
||||
from ctypes import c_ssize_t
|
||||
from ctypes import c_ulong
|
||||
from ctypes import c_void_p
|
||||
from ctypes import POINTER
|
||||
from ctypes import py_object
|
||||
from ctypes import Structure
|
||||
from ctypes.wintypes import DWORD
|
||||
from ctypes.wintypes import HANDLE
|
||||
from ctypes.wintypes import LPCWSTR
|
||||
from ctypes.wintypes import LPWSTR
|
||||
|
||||
from ._compat import _NonClosingTextIOWrapper
|
||||
|
||||
assert sys.platform == "win32"
|
||||
import msvcrt # noqa: E402
|
||||
from ctypes import windll # noqa: E402
|
||||
from ctypes import WINFUNCTYPE # noqa: E402
|
||||
|
||||
c_ssize_p = POINTER(c_ssize_t)
|
||||
|
||||
kernel32 = windll.kernel32
|
||||
GetStdHandle = kernel32.GetStdHandle
|
||||
ReadConsoleW = kernel32.ReadConsoleW
|
||||
WriteConsoleW = kernel32.WriteConsoleW
|
||||
GetConsoleMode = kernel32.GetConsoleMode
|
||||
GetLastError = kernel32.GetLastError
|
||||
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
|
||||
CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
|
||||
("CommandLineToArgvW", windll.shell32)
|
||||
)
|
||||
LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32))
|
||||
|
||||
STDIN_HANDLE = GetStdHandle(-10)
|
||||
STDOUT_HANDLE = GetStdHandle(-11)
|
||||
STDERR_HANDLE = GetStdHandle(-12)
|
||||
|
||||
PyBUF_SIMPLE = 0
|
||||
PyBUF_WRITABLE = 1
|
||||
|
||||
ERROR_SUCCESS = 0
|
||||
ERROR_NOT_ENOUGH_MEMORY = 8
|
||||
ERROR_OPERATION_ABORTED = 995
|
||||
|
||||
STDIN_FILENO = 0
|
||||
STDOUT_FILENO = 1
|
||||
STDERR_FILENO = 2
|
||||
|
||||
EOF = b"\x1a"
|
||||
MAX_BYTES_WRITTEN = 32767
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
try:
|
||||
# Using `typing_extensions.Buffer` instead of `collections.abc`
|
||||
# on Windows for some reason does not have `Sized` implemented.
|
||||
from collections.abc import Buffer # type: ignore
|
||||
except ImportError:
|
||||
from typing_extensions import Buffer
|
||||
|
||||
try:
|
||||
from ctypes import pythonapi
|
||||
except ImportError:
|
||||
# On PyPy we cannot get buffers so our ability to operate here is
|
||||
# severely limited.
|
||||
get_buffer = None
|
||||
else:
|
||||
|
||||
class Py_buffer(Structure):
|
||||
_fields_ = [ # noqa: RUF012
|
||||
("buf", c_void_p),
|
||||
("obj", py_object),
|
||||
("len", c_ssize_t),
|
||||
("itemsize", c_ssize_t),
|
||||
("readonly", c_int),
|
||||
("ndim", c_int),
|
||||
("format", c_char_p),
|
||||
("shape", c_ssize_p),
|
||||
("strides", c_ssize_p),
|
||||
("suboffsets", c_ssize_p),
|
||||
("internal", c_void_p),
|
||||
]
|
||||
|
||||
PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
|
||||
PyBuffer_Release = pythonapi.PyBuffer_Release
|
||||
|
||||
def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]:
|
||||
buf = Py_buffer()
|
||||
flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
|
||||
PyObject_GetBuffer(py_object(obj), byref(buf), flags)
|
||||
|
||||
try:
|
||||
buffer_type = c_char * buf.len
|
||||
out: Array[c_char] = buffer_type.from_address(buf.buf)
|
||||
return out
|
||||
finally:
|
||||
PyBuffer_Release(byref(buf))
|
||||
|
||||
|
||||
class _WindowsConsoleRawIOBase(io.RawIOBase):
|
||||
def __init__(self, handle: int | None) -> None:
|
||||
self.handle = handle
|
||||
|
||||
def isatty(self) -> t.Literal[True]:
|
||||
super().isatty()
|
||||
return True
|
||||
|
||||
|
||||
class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
|
||||
def readable(self) -> t.Literal[True]:
|
||||
return True
|
||||
|
||||
def readinto(self, b: Buffer) -> int:
|
||||
bytes_to_be_read = len(b)
|
||||
if not bytes_to_be_read:
|
||||
return 0
|
||||
elif bytes_to_be_read % 2:
|
||||
raise ValueError(
|
||||
"cannot read odd number of bytes from UTF-16-LE encoded console"
|
||||
)
|
||||
|
||||
buffer = get_buffer(b, writable=True)
|
||||
code_units_to_be_read = bytes_to_be_read // 2
|
||||
code_units_read = c_ulong()
|
||||
|
||||
rv = ReadConsoleW(
|
||||
HANDLE(self.handle),
|
||||
buffer,
|
||||
code_units_to_be_read,
|
||||
byref(code_units_read),
|
||||
None,
|
||||
)
|
||||
if GetLastError() == ERROR_OPERATION_ABORTED:
|
||||
# wait for KeyboardInterrupt
|
||||
time.sleep(0.1)
|
||||
if not rv:
|
||||
raise OSError(f"Windows error: {GetLastError()}")
|
||||
|
||||
if buffer[0] == EOF:
|
||||
return 0
|
||||
return 2 * code_units_read.value
|
||||
|
||||
|
||||
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
|
||||
def writable(self) -> t.Literal[True]:
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _get_error_message(errno: int) -> str:
|
||||
if errno == ERROR_SUCCESS:
|
||||
return "ERROR_SUCCESS"
|
||||
elif errno == ERROR_NOT_ENOUGH_MEMORY:
|
||||
return "ERROR_NOT_ENOUGH_MEMORY"
|
||||
return f"Windows error {errno}"
|
||||
|
||||
def write(self, b: Buffer) -> int:
|
||||
bytes_to_be_written = len(b)
|
||||
buf = get_buffer(b)
|
||||
code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
|
||||
code_units_written = c_ulong()
|
||||
|
||||
WriteConsoleW(
|
||||
HANDLE(self.handle),
|
||||
buf,
|
||||
code_units_to_be_written,
|
||||
byref(code_units_written),
|
||||
None,
|
||||
)
|
||||
bytes_written = 2 * code_units_written.value
|
||||
|
||||
if bytes_written == 0 and bytes_to_be_written > 0:
|
||||
raise OSError(self._get_error_message(GetLastError()))
|
||||
return bytes_written
|
||||
|
||||
|
||||
class ConsoleStream:
|
||||
def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None:
|
||||
self._text_stream = text_stream
|
||||
self.buffer = byte_stream
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self.buffer.name
|
||||
|
||||
def write(self, x: t.AnyStr) -> int:
|
||||
if isinstance(x, str):
|
||||
return self._text_stream.write(x)
|
||||
try:
|
||||
self.flush()
|
||||
except Exception:
|
||||
pass
|
||||
return self.buffer.write(x)
|
||||
|
||||
def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None:
|
||||
for line in lines:
|
||||
self.write(line)
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._text_stream, name)
|
||||
|
||||
def isatty(self) -> bool:
|
||||
return self.buffer.isatty()
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<ConsoleStream name={self.name!r} encoding={self.encoding!r}>"
|
||||
|
||||
|
||||
def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO:
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
|
||||
|
||||
|
||||
def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO:
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
|
||||
|
||||
|
||||
def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO:
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
|
||||
|
||||
|
||||
_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = {
|
||||
0: _get_text_stdin,
|
||||
1: _get_text_stdout,
|
||||
2: _get_text_stderr,
|
||||
}
|
||||
|
||||
|
||||
def _is_console(f: t.TextIO) -> bool:
|
||||
if not hasattr(f, "fileno"):
|
||||
return False
|
||||
|
||||
try:
|
||||
fileno = f.fileno()
|
||||
except (OSError, io.UnsupportedOperation):
|
||||
return False
|
||||
|
||||
handle = msvcrt.get_osfhandle(fileno)
|
||||
return bool(GetConsoleMode(handle, byref(DWORD())))
|
||||
|
||||
|
||||
def _get_windows_console_stream(
|
||||
f: t.TextIO, encoding: str | None, errors: str | None
|
||||
) -> t.TextIO | None:
|
||||
if (
|
||||
get_buffer is None
|
||||
or encoding not in {"utf-16-le", None}
|
||||
or errors not in {"strict", None}
|
||||
or not _is_console(f)
|
||||
):
|
||||
return None
|
||||
|
||||
func = _stream_factories.get(f.fileno())
|
||||
if func is None:
|
||||
return None
|
||||
|
||||
b = getattr(f, "buffer", None)
|
||||
|
||||
if b is None:
|
||||
return None
|
||||
|
||||
return func(b)
|
||||
3347
tapdown/lib/python3.11/site-packages/click/core.py
Normal file
File diff suppressed because it is too large
551
tapdown/lib/python3.11/site-packages/click/decorators.py
Normal file
@@ -0,0 +1,551 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
import typing as t
|
||||
from functools import update_wrapper
|
||||
from gettext import gettext as _
|
||||
|
||||
from .core import Argument
|
||||
from .core import Command
|
||||
from .core import Context
|
||||
from .core import Group
|
||||
from .core import Option
|
||||
from .core import Parameter
|
||||
from .globals import get_current_context
|
||||
from .utils import echo
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
|
||||
P = te.ParamSpec("P")
|
||||
|
||||
R = t.TypeVar("R")
|
||||
T = t.TypeVar("T")
|
||||
_AnyCallable = t.Callable[..., t.Any]
|
||||
FC = t.TypeVar("FC", bound="_AnyCallable | Command")
|
||||
|
||||
|
||||
def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]:
|
||||
"""Marks a callback as wanting to receive the current context
|
||||
object as first argument.
|
||||
"""
|
||||
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
return f(get_current_context(), *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
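# Illustrative sketch (not part of the vendored click source): the usual way
# ``pass_context`` is combined with ``command`` and ``option``. The command
# and option names are made-up placeholders.
def _example_pass_context_usage() -> None:
    import click

    @click.command()
    @click.option("--count", default=2, help="Placeholder option for the example.")
    @click.pass_context
    def repeat(ctx: click.Context, count: int) -> None:
        """Echo the invoked command path ``count`` times."""
        for _ in range(count):
            click.echo(ctx.command_path)

    repeat(["--count", "3"], standalone_mode=False)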
||||
|
||||
|
||||
def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
|
||||
"""Similar to :func:`pass_context`, but only pass the object on the
|
||||
context onwards (:attr:`Context.obj`). This is useful if that object
|
||||
represents the state of a nested system.
|
||||
"""
|
||||
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
return f(get_current_context().obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
|
||||
def make_pass_decorator(
|
||||
object_type: type[T], ensure: bool = False
|
||||
) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]:
|
||||
"""Given an object type this creates a decorator that will work
|
||||
similar to :func:`pass_obj` but instead of passing the object of the
|
||||
current context, it will find the innermost context of type
|
||||
:func:`object_type`.
|
||||
|
||||
This generates a decorator that works roughly like this::
|
||||
|
||||
from functools import update_wrapper
|
||||
|
||||
def decorator(f):
|
||||
@pass_context
|
||||
def new_func(ctx, *args, **kwargs):
|
||||
obj = ctx.find_object(object_type)
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
return update_wrapper(new_func, f)
|
||||
return decorator
|
||||
|
||||
:param object_type: the type of the object to pass.
|
||||
:param ensure: if set to `True`, a new object will be created and
|
||||
remembered on the context if it's not there yet.
|
||||
"""
|
||||
|
||||
def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
ctx = get_current_context()
|
||||
|
||||
obj: T | None
|
||||
if ensure:
|
||||
obj = ctx.ensure_object(object_type)
|
||||
else:
|
||||
obj = ctx.find_object(object_type)
|
||||
|
||||
if obj is None:
|
||||
raise RuntimeError(
|
||||
"Managed to invoke callback without a context"
|
||||
f" object of type {object_type.__name__!r}"
|
||||
" existing."
|
||||
)
|
||||
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
return decorator
|
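# Illustrative sketch (not part of the vendored click source): creating and
# using a pass decorator for a hypothetical ``Repo`` state object shared by
# subcommands. ``Repo`` and the command names are placeholders.
def _example_make_pass_decorator_usage() -> None:
    import click

    class Repo:
        def __init__(self, home: str) -> None:
            self.home = home

    pass_repo = click.make_pass_decorator(Repo)

    @click.group()
    @click.option("--home", default=".")
    @click.pass_context
    def cli(ctx: click.Context, home: str) -> None:
        ctx.obj = Repo(home)  # stored on the context, later found by pass_repo

    @cli.command()
    @pass_repo
    def status(repo: Repo) -> None:
        click.echo(f"repo home: {repo.home}")

    cli(["status"], standalone_mode=False)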
||||
|
||||
|
||||
def pass_meta_key(
|
||||
key: str, *, doc_description: str | None = None
|
||||
) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]:
|
||||
"""Create a decorator that passes a key from
|
||||
:attr:`click.Context.meta` as the first argument to the decorated
|
||||
function.
|
||||
|
||||
:param key: Key in ``Context.meta`` to pass.
|
||||
:param doc_description: Description of the object being passed,
|
||||
inserted into the decorator's docstring. Defaults to "the 'key'
|
||||
key from Context.meta".
|
||||
|
||||
.. versionadded:: 8.0
|
||||
"""
|
||||
|
||||
def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
ctx = get_current_context()
|
||||
obj = ctx.meta[key]
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
if doc_description is None:
|
||||
doc_description = f"the {key!r} key from :attr:`click.Context.meta`"
|
||||
|
||||
decorator.__doc__ = (
|
||||
f"Decorator that passes {doc_description} as the first argument"
|
||||
" to the decorated function."
|
||||
)
|
||||
return decorator
|
||||
|
||||
|
||||
CmdType = t.TypeVar("CmdType", bound=Command)
|
||||
|
||||
|
||||
# variant: no call, directly as decorator for a function.
|
||||
@t.overload
|
||||
def command(name: _AnyCallable) -> Command: ...
|
||||
|
||||
|
||||
# variant: with positional name and with positional or keyword cls argument:
|
||||
# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...)
|
||||
@t.overload
|
||||
def command(
|
||||
name: str | None,
|
||||
cls: type[CmdType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], CmdType]: ...
|
||||
|
||||
|
||||
# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...)
|
||||
@t.overload
|
||||
def command(
|
||||
name: None = None,
|
||||
*,
|
||||
cls: type[CmdType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], CmdType]: ...
|
||||
|
||||
|
||||
# variant: with optional string name, no cls argument provided.
|
||||
@t.overload
|
||||
def command(
|
||||
name: str | None = ..., cls: None = None, **attrs: t.Any
|
||||
) -> t.Callable[[_AnyCallable], Command]: ...
|
||||
|
||||
|
||||
def command(
|
||||
name: str | _AnyCallable | None = None,
|
||||
cls: type[CmdType] | None = None,
|
||||
**attrs: t.Any,
|
||||
) -> Command | t.Callable[[_AnyCallable], Command | CmdType]:
|
||||
r"""Creates a new :class:`Command` and uses the decorated function as
|
||||
callback. This will also automatically attach all decorated
|
||||
:func:`option`\s and :func:`argument`\s as parameters to the command.
|
||||
|
||||
The name of the command defaults to the name of the function, converted to
|
||||
lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes
|
||||
``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example,
|
||||
``init_data_command`` becomes ``init-data``.
|
||||
|
||||
All keyword arguments are forwarded to the underlying command class.
|
||||
For the ``params`` argument, any decorated params are appended to
|
||||
the end of the list.
|
||||
|
||||
Once decorated the function turns into a :class:`Command` instance
|
||||
that can be invoked as a command line utility or be attached to a
|
||||
command :class:`Group`.
|
||||
|
||||
:param name: The name of the command. Defaults to modifying the function's
|
||||
name as described above.
|
||||
:param cls: The command class to create. Defaults to :class:`Command`.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are
|
||||
removed when generating the name.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
This decorator can be applied without parentheses.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
The ``params`` argument can be used. Decorated params are
|
||||
appended to the end of the list.
|
||||
"""
|
||||
|
||||
func: t.Callable[[_AnyCallable], t.Any] | None = None
|
||||
|
||||
if callable(name):
|
||||
func = name
|
||||
name = None
|
||||
assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class."
|
||||
assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments."
|
||||
|
||||
if cls is None:
|
||||
cls = t.cast("type[CmdType]", Command)
|
||||
|
||||
def decorator(f: _AnyCallable) -> CmdType:
|
||||
if isinstance(f, Command):
|
||||
raise TypeError("Attempted to convert a callback into a command twice.")
|
||||
|
||||
attr_params = attrs.pop("params", None)
|
||||
params = attr_params if attr_params is not None else []
|
||||
|
||||
try:
|
||||
decorator_params = f.__click_params__ # type: ignore
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
del f.__click_params__ # type: ignore
|
||||
params.extend(reversed(decorator_params))
|
||||
|
||||
if attrs.get("help") is None:
|
||||
attrs["help"] = f.__doc__
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
assert cls is not None
|
||||
assert not callable(name)
|
||||
|
||||
if name is not None:
|
||||
cmd_name = name
|
||||
else:
|
||||
cmd_name = f.__name__.lower().replace("_", "-")
|
||||
cmd_left, sep, suffix = cmd_name.rpartition("-")
|
||||
|
||||
if sep and suffix in {"command", "cmd", "group", "grp"}:
|
||||
cmd_name = cmd_left
|
||||
|
||||
cmd = cls(name=cmd_name, callback=f, params=params, **attrs)
|
||||
cmd.__doc__ = f.__doc__
|
||||
return cmd
|
||||
|
||||
if func is not None:
|
||||
return decorator(func)
|
||||
|
||||
return decorator
|
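# Illustrative sketch (not part of the vendored click source): the name
# derivation described in the docstring above. The function name is a
# placeholder; ``init_data_command`` becomes the CLI name ``init-data``.
def _example_command_name_usage() -> None:
    import click

    @click.command()
    def init_data_command() -> None:
        """Initialise some placeholder data."""
        click.echo("initialised")

    assert init_data_command.name == "init-data"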
||||
|
||||
|
||||
GrpType = t.TypeVar("GrpType", bound=Group)
|
||||
|
||||
|
||||
# variant: no call, directly as decorator for a function.
|
||||
@t.overload
|
||||
def group(name: _AnyCallable) -> Group: ...
|
||||
|
||||
|
||||
# variant: with positional name and with positional or keyword cls argument:
|
||||
# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...)
|
||||
@t.overload
|
||||
def group(
|
||||
name: str | None,
|
||||
cls: type[GrpType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], GrpType]: ...
|
||||
|
||||
|
||||
# variant: name omitted, cls _must_ be a keyword argument, @group(cls=GroupCls, ...)
|
||||
@t.overload
|
||||
def group(
|
||||
name: None = None,
|
||||
*,
|
||||
cls: type[GrpType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], GrpType]: ...
|
||||
|
||||
|
||||
# variant: with optional string name, no cls argument provided.
|
||||
@t.overload
|
||||
def group(
|
||||
name: str | None = ..., cls: None = None, **attrs: t.Any
|
||||
) -> t.Callable[[_AnyCallable], Group]: ...
|
||||
|
||||
|
||||
def group(
|
||||
name: str | _AnyCallable | None = None,
|
||||
cls: type[GrpType] | None = None,
|
||||
**attrs: t.Any,
|
||||
) -> Group | t.Callable[[_AnyCallable], Group | GrpType]:
|
||||
"""Creates a new :class:`Group` with a function as callback. This
|
||||
works otherwise the same as :func:`command`, except that the `cls`
|
||||
parameter is set to :class:`Group`.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
This decorator can be applied without parentheses.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = t.cast("type[GrpType]", Group)
|
||||
|
||||
if callable(name):
|
||||
return command(cls=cls, **attrs)(name)
|
||||
|
||||
return command(name, cls, **attrs)
|
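# Illustrative sketch (not part of the vendored click source): a group with an
# attached subcommand. The group and command names are placeholders.
def _example_group_usage() -> None:
    import click

    @click.group()
    def cli() -> None:
        """Placeholder top-level group."""

    @cli.command()
    def sync() -> None:
        click.echo("syncing")

    cli(["sync"], standalone_mode=False)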
||||
|
||||
|
||||
def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None:
|
||||
if isinstance(f, Command):
|
||||
f.params.append(param)
|
||||
else:
|
||||
if not hasattr(f, "__click_params__"):
|
||||
f.__click_params__ = [] # type: ignore
|
||||
|
||||
f.__click_params__.append(param) # type: ignore
|
||||
|
||||
|
||||
def argument(
|
||||
*param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any
|
||||
) -> t.Callable[[FC], FC]:
|
||||
"""Attaches an argument to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Argument`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Argument` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
For the default argument class, refer to :class:`Argument` and
|
||||
:class:`Parameter` for descriptions of parameters.
|
||||
|
||||
:param cls: the argument class to instantiate. This defaults to
|
||||
:class:`Argument`.
|
||||
:param param_decls: Passed as positional arguments to the constructor of
|
||||
``cls``.
|
||||
:param attrs: Passed as keyword arguments to the constructor of ``cls``.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = Argument
|
||||
|
||||
def decorator(f: FC) -> FC:
|
||||
_param_memo(f, cls(param_decls, **attrs))
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def option(
|
||||
*param_decls: str, cls: type[Option] | None = None, **attrs: t.Any
|
||||
) -> t.Callable[[FC], FC]:
|
||||
"""Attaches an option to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Option`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Option` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
For the default option class, refer to :class:`Option` and
|
||||
:class:`Parameter` for descriptions of parameters.
|
||||
|
||||
:param cls: the option class to instantiate. This defaults to
|
||||
:class:`Option`.
|
||||
:param param_decls: Passed as positional arguments to the constructor of
|
||||
``cls``.
|
||||
:param attrs: Passed as keyword arguments to the constructor of ``cls``.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = Option
|
||||
|
||||
def decorator(f: FC) -> FC:
|
||||
_param_memo(f, cls(param_decls, **attrs))
|
||||
return f
|
||||
|
||||
return decorator
|
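# Illustrative sketch (not part of the vendored click source): ``argument``
# and ``option`` attached to one command. Names and defaults are placeholders.
def _example_argument_option_usage() -> None:
    import click

    @click.command()
    @click.argument("src")
    @click.option("--verbose", is_flag=True, help="Placeholder flag.")
    def copy(src: str, verbose: bool) -> None:
        if verbose:
            click.echo(f"copying {src}")

    copy(["input.txt", "--verbose"], standalone_mode=False)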
||||
|
||||
|
||||
def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
|
||||
"""Add a ``--yes`` option which shows a prompt before continuing if
|
||||
not passed. If the prompt is declined, the program will exit.
|
||||
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--yes"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
"""
|
||||
|
||||
def callback(ctx: Context, param: Parameter, value: bool) -> None:
|
||||
if not value:
|
||||
ctx.abort()
|
||||
|
||||
if not param_decls:
|
||||
param_decls = ("--yes",)
|
||||
|
||||
kwargs.setdefault("is_flag", True)
|
||||
kwargs.setdefault("callback", callback)
|
||||
kwargs.setdefault("expose_value", False)
|
||||
kwargs.setdefault("prompt", "Do you want to continue?")
|
||||
kwargs.setdefault("help", "Confirm the action without prompting.")
|
||||
return option(*param_decls, **kwargs)
|
||||
|
||||
|
||||
def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
|
||||
"""Add a ``--password`` option which prompts for a password, hiding
|
||||
input and asking to enter the value again for confirmation.
|
||||
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--password"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
"""
|
||||
if not param_decls:
|
||||
param_decls = ("--password",)
|
||||
|
||||
kwargs.setdefault("prompt", True)
|
||||
kwargs.setdefault("confirmation_prompt", True)
|
||||
kwargs.setdefault("hide_input", True)
|
||||
return option(*param_decls, **kwargs)
|
||||
|
||||
|
||||
def version_option(
|
||||
version: str | None = None,
|
||||
*param_decls: str,
|
||||
package_name: str | None = None,
|
||||
prog_name: str | None = None,
|
||||
message: str | None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> t.Callable[[FC], FC]:
|
||||
"""Add a ``--version`` option which immediately prints the version
|
||||
number and exits the program.
|
||||
|
||||
If ``version`` is not provided, Click will try to detect it using
|
||||
:func:`importlib.metadata.version` to get the version for the
|
||||
``package_name``.
|
||||
|
||||
If ``package_name`` is not provided, Click will try to detect it by
|
||||
inspecting the stack frames. This will be used to detect the
|
||||
version, so it must match the name of the installed package.
|
||||
|
||||
:param version: The version number to show. If not provided, Click
|
||||
will try to detect it.
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--version"``.
|
||||
:param package_name: The package name to detect the version from. If
|
||||
not provided, Click will try to detect it.
|
||||
:param prog_name: The name of the CLI to show in the message. If not
|
||||
provided, it will be detected from the command.
|
||||
:param message: The message to show. The values ``%(prog)s``,
|
||||
``%(package)s``, and ``%(version)s`` are available. Defaults to
|
||||
``"%(prog)s, version %(version)s"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
:raise RuntimeError: ``version`` could not be detected.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Add the ``package_name`` parameter, and the ``%(package)s``
|
||||
value for messages.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Use :mod:`importlib.metadata` instead of ``pkg_resources``. The
|
||||
version is detected based on the package name, not the entry
|
||||
point name. The Python package name must match the installed
|
||||
package name, or be passed with ``package_name=``.
|
||||
"""
|
||||
if message is None:
|
||||
message = _("%(prog)s, version %(version)s")
|
||||
|
||||
if version is None and package_name is None:
|
||||
frame = inspect.currentframe()
|
||||
f_back = frame.f_back if frame is not None else None
|
||||
f_globals = f_back.f_globals if f_back is not None else None
|
||||
# break reference cycle
|
||||
# https://docs.python.org/3/library/inspect.html#the-interpreter-stack
|
||||
del frame
|
||||
|
||||
if f_globals is not None:
|
||||
package_name = f_globals.get("__name__")
|
||||
|
||||
if package_name == "__main__":
|
||||
package_name = f_globals.get("__package__")
|
||||
|
||||
if package_name:
|
||||
package_name = package_name.partition(".")[0]
|
||||
|
||||
def callback(ctx: Context, param: Parameter, value: bool) -> None:
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
|
||||
nonlocal prog_name
|
||||
nonlocal version
|
||||
|
||||
if prog_name is None:
|
||||
prog_name = ctx.find_root().info_name
|
||||
|
||||
if version is None and package_name is not None:
|
||||
import importlib.metadata
|
||||
|
||||
try:
|
||||
version = importlib.metadata.version(package_name)
|
||||
except importlib.metadata.PackageNotFoundError:
|
||||
raise RuntimeError(
|
||||
f"{package_name!r} is not installed. Try passing"
|
||||
" 'package_name' instead."
|
||||
) from None
|
||||
|
||||
if version is None:
|
||||
raise RuntimeError(
|
||||
f"Could not determine the version for {package_name!r} automatically."
|
||||
)
|
||||
|
||||
echo(
|
||||
message % {"prog": prog_name, "package": package_name, "version": version},
|
||||
color=ctx.color,
|
||||
)
|
||||
ctx.exit()
|
||||
|
||||
if not param_decls:
|
||||
param_decls = ("--version",)
|
||||
|
||||
kwargs.setdefault("is_flag", True)
|
||||
kwargs.setdefault("expose_value", False)
|
||||
kwargs.setdefault("is_eager", True)
|
||||
kwargs.setdefault("help", _("Show the version and exit."))
|
||||
kwargs["callback"] = callback
|
||||
return option(*param_decls, **kwargs)
|
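# Illustrative sketch (not part of the vendored click source): supplying an
# explicit version string so no package metadata lookup is needed. The version
# number and program name are placeholders.
def _example_version_option_usage() -> None:
    import click

    @click.command()
    @click.version_option("1.2.3", prog_name="example-cli")
    def cli() -> None:
        """Placeholder command."""

    cli(["--version"], standalone_mode=False)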
||||
|
||||
|
||||
def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
|
||||
"""Pre-configured ``--help`` option which immediately prints the help page
|
||||
and exits the program.
|
||||
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--help"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
"""
|
||||
|
||||
def show_help(ctx: Context, param: Parameter, value: bool) -> None:
|
||||
"""Callback that print the help page on ``<stdout>`` and exits."""
|
||||
if value and not ctx.resilient_parsing:
|
||||
echo(ctx.get_help(), color=ctx.color)
|
||||
ctx.exit()
|
||||
|
||||
if not param_decls:
|
||||
param_decls = ("--help",)
|
||||
|
||||
kwargs.setdefault("is_flag", True)
|
||||
kwargs.setdefault("expose_value", False)
|
||||
kwargs.setdefault("is_eager", True)
|
||||
kwargs.setdefault("help", _("Show this message and exit."))
|
||||
kwargs.setdefault("callback", show_help)
|
||||
|
||||
return option(*param_decls, **kwargs)
|
||||
308
tapdown/lib/python3.11/site-packages/click/exceptions.py
Normal file
@@ -0,0 +1,308 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import typing as t
|
||||
from gettext import gettext as _
|
||||
from gettext import ngettext
|
||||
|
||||
from ._compat import get_text_stderr
|
||||
from .globals import resolve_color_default
|
||||
from .utils import echo
|
||||
from .utils import format_filename
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .core import Command
|
||||
from .core import Context
|
||||
from .core import Parameter
|
||||
|
||||
|
||||
def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None:
|
||||
if param_hint is not None and not isinstance(param_hint, str):
|
||||
return " / ".join(repr(x) for x in param_hint)
|
||||
|
||||
return param_hint
|
||||
|
||||
|
||||
class ClickException(Exception):
|
||||
"""An exception that Click can handle and show to the user."""
|
||||
|
||||
#: The exit code for this exception.
|
||||
exit_code = 1
|
||||
|
||||
def __init__(self, message: str) -> None:
|
||||
super().__init__(message)
|
||||
# The context will be removed by the time we print the message, so cache
|
||||
# the color settings here to be used later on (in `show`)
|
||||
self.show_color: bool | None = resolve_color_default()
|
||||
self.message = message
|
||||
|
||||
def format_message(self) -> str:
|
||||
return self.message
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.message
|
||||
|
||||
def show(self, file: t.IO[t.Any] | None = None) -> None:
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
|
||||
echo(
|
||||
_("Error: {message}").format(message=self.format_message()),
|
||||
file=file,
|
||||
color=self.show_color,
|
||||
)
|
||||
|
||||
|
||||
class UsageError(ClickException):
|
||||
"""An internal exception that signals a usage error. This typically
|
||||
aborts any further handling.
|
||||
|
||||
:param message: the error message to display.
|
||||
:param ctx: optionally the context that caused this error. Click will
|
||||
fill in the context automatically in some situations.
|
||||
"""
|
||||
|
||||
exit_code = 2
|
||||
|
||||
def __init__(self, message: str, ctx: Context | None = None) -> None:
|
||||
super().__init__(message)
|
||||
self.ctx = ctx
|
||||
self.cmd: Command | None = self.ctx.command if self.ctx else None
|
||||
|
||||
def show(self, file: t.IO[t.Any] | None = None) -> None:
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
color = None
|
||||
hint = ""
|
||||
if (
|
||||
self.ctx is not None
|
||||
and self.ctx.command.get_help_option(self.ctx) is not None
|
||||
):
|
||||
hint = _("Try '{command} {option}' for help.").format(
|
||||
command=self.ctx.command_path, option=self.ctx.help_option_names[0]
|
||||
)
|
||||
hint = f"{hint}\n"
|
||||
if self.ctx is not None:
|
||||
color = self.ctx.color
|
||||
echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color)
|
||||
echo(
|
||||
_("Error: {message}").format(message=self.format_message()),
|
||||
file=file,
|
||||
color=color,
|
||||
)
|
||||
|
||||
|
||||
class BadParameter(UsageError):
|
||||
"""An exception that formats out a standardized error message for a
|
||||
bad parameter. This is useful when thrown from a callback or type as
|
||||
Click will attach contextual information to it (for instance, which
|
||||
parameter it is).
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param param: the parameter object that caused this error. This can
|
||||
be left out, and Click will attach this info itself
|
||||
if possible.
|
||||
:param param_hint: a string that shows up as parameter name. This
|
||||
can be used as alternative to `param` in cases
|
||||
where custom validation should happen. If it is
|
||||
a string it's used as such, if it's a list then
|
||||
each item is quoted and separated.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
ctx: Context | None = None,
|
||||
param: Parameter | None = None,
|
||||
param_hint: cabc.Sequence[str] | str | None = None,
|
||||
) -> None:
|
||||
super().__init__(message, ctx)
|
||||
self.param = param
|
||||
self.param_hint = param_hint
|
||||
|
||||
def format_message(self) -> str:
|
||||
if self.param_hint is not None:
|
||||
param_hint = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx) # type: ignore
|
||||
else:
|
||||
return _("Invalid value: {message}").format(message=self.message)
|
||||
|
||||
return _("Invalid value for {param_hint}: {message}").format(
|
||||
param_hint=_join_param_hints(param_hint), message=self.message
|
||||
)
|
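# Illustrative sketch (not part of the vendored click source): raising
# BadParameter from an option callback so the error is reported against the
# right parameter. The option name and validation rule are placeholders.
def _example_bad_parameter_usage() -> None:
    import click

    def validate_port(ctx: click.Context, param: click.Parameter, value: int) -> int:
        if not (1 <= value <= 65535):
            raise click.BadParameter("port must be between 1 and 65535")
        return value

    @click.command()
    @click.option("--port", type=int, default=8080, callback=validate_port)
    def serve(port: int) -> None:
        click.echo(f"serving on port {port}")

    serve(["--port", "8080"], standalone_mode=False)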
||||
|
||||
|
||||
class MissingParameter(BadParameter):
|
||||
"""Raised if click required an option or argument but it was not
|
||||
provided when invoking the script.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param param_type: a string that indicates the type of the parameter.
|
||||
The default is to inherit the parameter type from
|
||||
the given `param`. Valid values are ``'parameter'``,
|
||||
``'option'`` or ``'argument'``.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str | None = None,
|
||||
ctx: Context | None = None,
|
||||
param: Parameter | None = None,
|
||||
param_hint: cabc.Sequence[str] | str | None = None,
|
||||
param_type: str | None = None,
|
||||
) -> None:
|
||||
super().__init__(message or "", ctx, param, param_hint)
|
||||
self.param_type = param_type
|
||||
|
||||
def format_message(self) -> str:
|
||||
if self.param_hint is not None:
|
||||
param_hint: cabc.Sequence[str] | str | None = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx) # type: ignore
|
||||
else:
|
||||
param_hint = None
|
||||
|
||||
param_hint = _join_param_hints(param_hint)
|
||||
param_hint = f" {param_hint}" if param_hint else ""
|
||||
|
||||
param_type = self.param_type
|
||||
if param_type is None and self.param is not None:
|
||||
param_type = self.param.param_type_name
|
||||
|
||||
msg = self.message
|
||||
if self.param is not None:
|
||||
msg_extra = self.param.type.get_missing_message(
|
||||
param=self.param, ctx=self.ctx
|
||||
)
|
||||
if msg_extra:
|
||||
if msg:
|
||||
msg += f". {msg_extra}"
|
||||
else:
|
||||
msg = msg_extra
|
||||
|
||||
msg = f" {msg}" if msg else ""
|
||||
|
||||
# Translate param_type for known types.
|
||||
if param_type == "argument":
|
||||
missing = _("Missing argument")
|
||||
elif param_type == "option":
|
||||
missing = _("Missing option")
|
||||
elif param_type == "parameter":
|
||||
missing = _("Missing parameter")
|
||||
else:
|
||||
missing = _("Missing {param_type}").format(param_type=param_type)
|
||||
|
||||
return f"{missing}{param_hint}.{msg}"
|
||||
|
||||
def __str__(self) -> str:
|
||||
if not self.message:
|
||||
param_name = self.param.name if self.param else None
|
||||
return _("Missing parameter: {param_name}").format(param_name=param_name)
|
||||
else:
|
||||
return self.message
|
||||
|
||||
|
||||
class NoSuchOption(UsageError):
|
||||
"""Raised if click attempted to handle an option that does not
|
||||
exist.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
option_name: str,
|
||||
message: str | None = None,
|
||||
possibilities: cabc.Sequence[str] | None = None,
|
||||
ctx: Context | None = None,
|
||||
) -> None:
|
||||
if message is None:
|
||||
message = _("No such option: {name}").format(name=option_name)
|
||||
|
||||
super().__init__(message, ctx)
|
||||
self.option_name = option_name
|
||||
self.possibilities = possibilities
|
||||
|
||||
def format_message(self) -> str:
|
||||
if not self.possibilities:
|
||||
return self.message
|
||||
|
||||
possibility_str = ", ".join(sorted(self.possibilities))
|
||||
suggest = ngettext(
|
||||
"Did you mean {possibility}?",
|
||||
"(Possible options: {possibilities})",
|
||||
len(self.possibilities),
|
||||
).format(possibility=possibility_str, possibilities=possibility_str)
|
||||
return f"{self.message} {suggest}"
|
||||
|
||||
|
||||
class BadOptionUsage(UsageError):
|
||||
"""Raised if an option is generally supplied but the use of the option
|
||||
was incorrect. This is for instance raised if the number of arguments
|
||||
for an option is not correct.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param option_name: the name of the option being used incorrectly.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, option_name: str, message: str, ctx: Context | None = None
|
||||
) -> None:
|
||||
super().__init__(message, ctx)
|
||||
self.option_name = option_name
|
||||
|
||||
|
||||
class BadArgumentUsage(UsageError):
|
||||
"""Raised if an argument is generally supplied but the use of the argument
|
||||
was incorrect. This is for instance raised if the number of values
|
||||
for an argument is not correct.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
"""
|
||||
|
||||
|
||||
class NoArgsIsHelpError(UsageError):
|
||||
def __init__(self, ctx: Context) -> None:
|
||||
self.ctx: Context
|
||||
super().__init__(ctx.get_help(), ctx=ctx)
|
||||
|
||||
def show(self, file: t.IO[t.Any] | None = None) -> None:
|
||||
echo(self.format_message(), file=file, err=True, color=self.ctx.color)
|
||||
|
||||
|
||||
class FileError(ClickException):
|
||||
"""Raised if a file cannot be opened."""
|
||||
|
||||
def __init__(self, filename: str, hint: str | None = None) -> None:
|
||||
if hint is None:
|
||||
hint = _("unknown error")
|
||||
|
||||
super().__init__(hint)
|
||||
self.ui_filename: str = format_filename(filename)
|
||||
self.filename = filename
|
||||
|
||||
def format_message(self) -> str:
|
||||
return _("Could not open file {filename!r}: {message}").format(
|
||||
filename=self.ui_filename, message=self.message
|
||||
)
|
||||
|
||||
|
||||
class Abort(RuntimeError):
|
||||
"""An internal signalling exception that signals Click to abort."""
|
||||
|
||||
|
||||
class Exit(RuntimeError):
|
||||
"""An exception that indicates that the application should exit with some
|
||||
status code.
|
||||
|
||||
:param code: the status code to exit with.
|
||||
"""
|
||||
|
||||
__slots__ = ("exit_code",)
|
||||
|
||||
def __init__(self, code: int = 0) -> None:
|
||||
self.exit_code: int = code
|
||||
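# --- Illustrative sketch (editor-added, not part of click/exceptions.py) ---
# How the exception types above are typically raised from application code.
# The command name "serve" and the "--port" option are invented for this
# example; only the public click API is used.
import click

def _validate_port(ctx, param, value):
    # BadParameter lets Click attach the offending parameter to the message.
    if not (1 <= value <= 65535):
        raise click.BadParameter("port must be between 1 and 65535")
    return value

@click.command()
@click.option("--port", type=int, default=8000, callback=_validate_port)
def serve(port):
    if port < 1024:
        # UsageError is printed together with the usage line and a help hint.
        raise click.UsageError("ports below 1024 need elevated privileges")
    click.echo(f"serving on port {port}")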
301
tapdown/lib/python3.11/site-packages/click/formatting.py
Normal file
@@ -0,0 +1,301 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
from contextlib import contextmanager
|
||||
from gettext import gettext as _
|
||||
|
||||
from ._compat import term_len
|
||||
from .parser import _split_opt
|
||||
|
||||
# Can force a width. This is used by the test system
|
||||
FORCED_WIDTH: int | None = None
|
||||
|
||||
|
||||
def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]:
|
||||
widths: dict[int, int] = {}
|
||||
|
||||
for row in rows:
|
||||
for idx, col in enumerate(row):
|
||||
widths[idx] = max(widths.get(idx, 0), term_len(col))
|
||||
|
||||
return tuple(y for x, y in sorted(widths.items()))
|
||||
|
||||
|
||||
def iter_rows(
|
||||
rows: cabc.Iterable[tuple[str, str]], col_count: int
|
||||
) -> cabc.Iterator[tuple[str, ...]]:
|
||||
for row in rows:
|
||||
yield row + ("",) * (col_count - len(row))
|
||||
|
||||
|
||||
def wrap_text(
|
||||
text: str,
|
||||
width: int = 78,
|
||||
initial_indent: str = "",
|
||||
subsequent_indent: str = "",
|
||||
preserve_paragraphs: bool = False,
|
||||
) -> str:
|
||||
"""A helper function that intelligently wraps text. By default, it
|
||||
assumes that it operates on a single paragraph of text but if the
|
||||
`preserve_paragraphs` parameter is provided it will intelligently
|
||||
handle paragraphs (defined by two empty lines).
|
||||
|
||||
If paragraphs are handled, a paragraph can be prefixed with an empty
|
||||
line containing the ``\\b`` character (``\\x08``) to indicate that
|
||||
no rewrapping should happen in that block.
|
||||
|
||||
:param text: the text that should be rewrapped.
|
||||
:param width: the maximum width for the text.
|
||||
:param initial_indent: the initial indent that should be placed on the
|
||||
first line as a string.
|
||||
:param subsequent_indent: the indent string that should be placed on
|
||||
each consecutive line.
|
||||
:param preserve_paragraphs: if this flag is set then the wrapping will
|
||||
intelligently handle paragraphs.
|
||||
"""
|
||||
from ._textwrap import TextWrapper
|
||||
|
||||
text = text.expandtabs()
|
||||
wrapper = TextWrapper(
|
||||
width,
|
||||
initial_indent=initial_indent,
|
||||
subsequent_indent=subsequent_indent,
|
||||
replace_whitespace=False,
|
||||
)
|
||||
if not preserve_paragraphs:
|
||||
return wrapper.fill(text)
|
||||
|
||||
p: list[tuple[int, bool, str]] = []
|
||||
buf: list[str] = []
|
||||
indent = None
|
||||
|
||||
def _flush_par() -> None:
|
||||
if not buf:
|
||||
return
|
||||
if buf[0].strip() == "\b":
|
||||
p.append((indent or 0, True, "\n".join(buf[1:])))
|
||||
else:
|
||||
p.append((indent or 0, False, " ".join(buf)))
|
||||
del buf[:]
|
||||
|
||||
for line in text.splitlines():
|
||||
if not line:
|
||||
_flush_par()
|
||||
indent = None
|
||||
else:
|
||||
if indent is None:
|
||||
orig_len = term_len(line)
|
||||
line = line.lstrip()
|
||||
indent = orig_len - term_len(line)
|
||||
buf.append(line)
|
||||
_flush_par()
|
||||
|
||||
rv = []
|
||||
for indent, raw, text in p:
|
||||
with wrapper.extra_indent(" " * indent):
|
||||
if raw:
|
||||
rv.append(wrapper.indent_only(text))
|
||||
else:
|
||||
rv.append(wrapper.fill(text))
|
||||
|
||||
return "\n\n".join(rv)
|
||||
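# --- Illustrative sketch (editor-added, not part of click/formatting.py) ---
# Demonstrates the "\b" no-rewrap marker documented in wrap_text() above.
# The sample text and the width of 40 columns are arbitrary.
def _wrap_text_demo() -> str:
    sample = (
        "This first paragraph is rewrapped to the requested width like "
        "ordinary prose.\n"
        "\n"
        "\b\n"
        "this block\n"
        "    keeps its original line breaks\n"
    )
    return wrap_text(sample, width=40, preserve_paragraphs=True)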
|
||||
|
||||
class HelpFormatter:
|
||||
"""This class helps with formatting text-based help pages. It's
|
||||
usually just needed for very special internal cases, but it's also
|
||||
exposed so that developers can write their own fancy outputs.
|
||||
|
||||
At present, it always writes into memory.
|
||||
|
||||
:param indent_increment: the additional increment for each level.
|
||||
:param width: the width for the text. This defaults to the terminal
|
||||
width clamped to a maximum of 78.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
indent_increment: int = 2,
|
||||
width: int | None = None,
|
||||
max_width: int | None = None,
|
||||
) -> None:
|
||||
self.indent_increment = indent_increment
|
||||
if max_width is None:
|
||||
max_width = 80
|
||||
if width is None:
|
||||
import shutil
|
||||
|
||||
width = FORCED_WIDTH
|
||||
if width is None:
|
||||
width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50)
|
||||
self.width = width
|
||||
self.current_indent: int = 0
|
||||
self.buffer: list[str] = []
|
||||
|
||||
def write(self, string: str) -> None:
|
||||
"""Writes a unicode string into the internal buffer."""
|
||||
self.buffer.append(string)
|
||||
|
||||
def indent(self) -> None:
|
||||
"""Increases the indentation."""
|
||||
self.current_indent += self.indent_increment
|
||||
|
||||
def dedent(self) -> None:
|
||||
"""Decreases the indentation."""
|
||||
self.current_indent -= self.indent_increment
|
||||
|
||||
def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None:
|
||||
"""Writes a usage line into the buffer.
|
||||
|
||||
:param prog: the program name.
|
||||
:param args: whitespace separated list of arguments.
|
||||
:param prefix: The prefix for the first line. Defaults to
|
||||
``"Usage: "``.
|
||||
"""
|
||||
if prefix is None:
|
||||
prefix = f"{_('Usage:')} "
|
||||
|
||||
usage_prefix = f"{prefix:>{self.current_indent}}{prog} "
|
||||
text_width = self.width - self.current_indent
|
||||
|
||||
if text_width >= (term_len(usage_prefix) + 20):
|
||||
# The arguments will fit to the right of the prefix.
|
||||
indent = " " * term_len(usage_prefix)
|
||||
self.write(
|
||||
wrap_text(
|
||||
args,
|
||||
text_width,
|
||||
initial_indent=usage_prefix,
|
||||
subsequent_indent=indent,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# The prefix is too long, put the arguments on the next line.
|
||||
self.write(usage_prefix)
|
||||
self.write("\n")
|
||||
indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
|
||||
self.write(
|
||||
wrap_text(
|
||||
args, text_width, initial_indent=indent, subsequent_indent=indent
|
||||
)
|
||||
)
|
||||
|
||||
self.write("\n")
|
||||
|
||||
def write_heading(self, heading: str) -> None:
|
||||
"""Writes a heading into the buffer."""
|
||||
self.write(f"{'':>{self.current_indent}}{heading}:\n")
|
||||
|
||||
def write_paragraph(self) -> None:
|
||||
"""Writes a paragraph into the buffer."""
|
||||
if self.buffer:
|
||||
self.write("\n")
|
||||
|
||||
def write_text(self, text: str) -> None:
|
||||
"""Writes re-indented text into the buffer. This rewraps and
|
||||
preserves paragraphs.
|
||||
"""
|
||||
indent = " " * self.current_indent
|
||||
self.write(
|
||||
wrap_text(
|
||||
text,
|
||||
self.width,
|
||||
initial_indent=indent,
|
||||
subsequent_indent=indent,
|
||||
preserve_paragraphs=True,
|
||||
)
|
||||
)
|
||||
self.write("\n")
|
||||
|
||||
def write_dl(
|
||||
self,
|
||||
rows: cabc.Sequence[tuple[str, str]],
|
||||
col_max: int = 30,
|
||||
col_spacing: int = 2,
|
||||
) -> None:
|
||||
"""Writes a definition list into the buffer. This is how options
|
||||
and commands are usually formatted.
|
||||
|
||||
:param rows: a list of two item tuples for the terms and values.
|
||||
:param col_max: the maximum width of the first column.
|
||||
:param col_spacing: the number of spaces between the first and
|
||||
second column.
|
||||
"""
|
||||
rows = list(rows)
|
||||
widths = measure_table(rows)
|
||||
if len(widths) != 2:
|
||||
raise TypeError("Expected two columns for definition list")
|
||||
|
||||
first_col = min(widths[0], col_max) + col_spacing
|
||||
|
||||
for first, second in iter_rows(rows, len(widths)):
|
||||
self.write(f"{'':>{self.current_indent}}{first}")
|
||||
if not second:
|
||||
self.write("\n")
|
||||
continue
|
||||
if term_len(first) <= first_col - col_spacing:
|
||||
self.write(" " * (first_col - term_len(first)))
|
||||
else:
|
||||
self.write("\n")
|
||||
self.write(" " * (first_col + self.current_indent))
|
||||
|
||||
text_width = max(self.width - first_col - 2, 10)
|
||||
wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
|
||||
lines = wrapped_text.splitlines()
|
||||
|
||||
if lines:
|
||||
self.write(f"{lines[0]}\n")
|
||||
|
||||
for line in lines[1:]:
|
||||
self.write(f"{'':>{first_col + self.current_indent}}{line}\n")
|
||||
else:
|
||||
self.write("\n")
|
||||
|
||||
@contextmanager
|
||||
def section(self, name: str) -> cabc.Iterator[None]:
|
||||
"""Helpful context manager that writes a paragraph, a heading,
|
||||
and the indents.
|
||||
|
||||
:param name: the section name that is written as heading.
|
||||
"""
|
||||
self.write_paragraph()
|
||||
self.write_heading(name)
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
@contextmanager
|
||||
def indentation(self) -> cabc.Iterator[None]:
|
||||
"""A context manager that increases the indentation."""
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
def getvalue(self) -> str:
|
||||
"""Returns the buffer contents."""
|
||||
return "".join(self.buffer)
|
||||
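# --- Illustrative sketch (editor-added, not part of click/formatting.py) ---
# Minimal use of HelpFormatter: a usage line, a section heading, and a
# definition list, the same building blocks Click uses for --help output.
# The program name, options, and help texts are invented for the example.
def _help_formatter_demo() -> str:
    formatter = HelpFormatter(width=60)
    formatter.write_usage("example", "[OPTIONS]")
    with formatter.section("Options"):
        formatter.write_dl(
            [
                ("-v, --verbose", "Print more output."),
                ("--help", "Show this message and exit."),
            ]
        )
    return formatter.getvalue()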
|
||||
|
||||
def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]:
|
||||
"""Given a list of option strings this joins them in the most appropriate
|
||||
way and returns them in the form ``(formatted_string,
|
||||
any_prefix_is_slash)`` where the second item in the tuple is a flag that
|
||||
indicates if any of the option prefixes was a slash.
|
||||
"""
|
||||
rv = []
|
||||
any_prefix_is_slash = False
|
||||
|
||||
for opt in options:
|
||||
prefix = _split_opt(opt)[0]
|
||||
|
||||
if prefix == "/":
|
||||
any_prefix_is_slash = True
|
||||
|
||||
rv.append((len(prefix), opt))
|
||||
|
||||
rv.sort(key=lambda x: x[0])
|
||||
return ", ".join(x[1] for x in rv), any_prefix_is_slash
|
||||
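# --- Illustrative sketch (editor-added, not part of click/formatting.py) ---
# What join_options() produces; both results follow directly from the
# function above.
def _join_options_examples() -> None:
    # Short options sort before long ones; the flag reports slash prefixes.
    assert join_options(["--file", "-f"]) == ("-f, --file", False)
    assert join_options(["/F"]) == ("/F", True)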
67
tapdown/lib/python3.11/site-packages/click/globals.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from threading import local
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .core import Context
|
||||
|
||||
_local = local()
|
||||
|
||||
|
||||
@t.overload
|
||||
def get_current_context(silent: t.Literal[False] = False) -> Context: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def get_current_context(silent: bool = ...) -> Context | None: ...
|
||||
|
||||
|
||||
def get_current_context(silent: bool = False) -> Context | None:
|
||||
"""Returns the current click context. This can be used as a way to
|
||||
access the current context object from anywhere. This is a more implicit
|
||||
alternative to the :func:`pass_context` decorator. This function is
|
||||
primarily useful for helpers such as :func:`echo` which might be
|
||||
interested in changing its behavior based on the current context.
|
||||
|
||||
To push the current context, :meth:`Context.scope` can be used.
|
||||
|
||||
.. versionadded:: 5.0
|
||||
|
||||
:param silent: if set to `True` the return value is `None` if no context
|
||||
is available. The default behavior is to raise a
|
||||
:exc:`RuntimeError`.
|
||||
"""
|
||||
try:
|
||||
return t.cast("Context", _local.stack[-1])
|
||||
except (AttributeError, IndexError) as e:
|
||||
if not silent:
|
||||
raise RuntimeError("There is no active click context.") from e
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def push_context(ctx: Context) -> None:
|
||||
"""Pushes a new context to the current stack."""
|
||||
_local.__dict__.setdefault("stack", []).append(ctx)
|
||||
|
||||
|
||||
def pop_context() -> None:
|
||||
"""Removes the top level from the stack."""
|
||||
_local.stack.pop()
|
||||
|
||||
|
||||
def resolve_color_default(color: bool | None = None) -> bool | None:
|
||||
"""Internal helper to get the default value of the color flag. If a
|
||||
value is passed it's returned unchanged, otherwise it's looked up from
|
||||
the current context.
|
||||
"""
|
||||
if color is not None:
|
||||
return color
|
||||
|
||||
ctx = get_current_context(silent=True)
|
||||
|
||||
if ctx is not None:
|
||||
return ctx.color
|
||||
|
||||
return None
|
||||
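# --- Illustrative sketch (editor-added, not part of click/globals.py) ---
# A helper that adapts its behaviour to the active context when one exists,
# in the same spirit as resolve_color_default() above. The helper name and
# the "debug" attribute on ctx.obj are invented for the example.
def _debug_enabled() -> bool:
    ctx = get_current_context(silent=True)
    if ctx is None or ctx.obj is None:
        return False
    return bool(getattr(ctx.obj, "debug", False))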
532
tapdown/lib/python3.11/site-packages/click/parser.py
Normal file
@@ -0,0 +1,532 @@
|
||||
"""
|
||||
This module started out as largely a copy paste from the stdlib's
|
||||
optparse module with the features removed that we do not need from
|
||||
optparse because we implement them in Click on a higher level (for
|
||||
instance type handling, help formatting and a lot more).
|
||||
|
||||
The plan is to remove more and more from here over time.
|
||||
|
||||
The reason this is a different module and not optparse from the stdlib
|
||||
is that there are differences in 2.x and 3.x about the error messages
|
||||
generated and optparse in the stdlib uses gettext for no good reason
|
||||
and might cause us issues.
|
||||
|
||||
Click uses parts of optparse written by Gregory P. Ward and maintained
|
||||
by the Python Software Foundation. This is limited to code in parser.py.
|
||||
|
||||
Copyright 2001-2006 Gregory P. Ward. All rights reserved.
|
||||
Copyright 2002-2006 Python Software Foundation. All rights reserved.
|
||||
"""
|
||||
|
||||
# This code uses parts of optparse written by Gregory P. Ward and
|
||||
# maintained by the Python Software Foundation.
|
||||
# Copyright 2001-2006 Gregory P. Ward
|
||||
# Copyright 2002-2006 Python Software Foundation
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import typing as t
|
||||
from collections import deque
|
||||
from gettext import gettext as _
|
||||
from gettext import ngettext
|
||||
|
||||
from ._utils import FLAG_NEEDS_VALUE
|
||||
from ._utils import UNSET
|
||||
from .exceptions import BadArgumentUsage
|
||||
from .exceptions import BadOptionUsage
|
||||
from .exceptions import NoSuchOption
|
||||
from .exceptions import UsageError
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from ._utils import T_FLAG_NEEDS_VALUE
|
||||
from ._utils import T_UNSET
|
||||
from .core import Argument as CoreArgument
|
||||
from .core import Context
|
||||
from .core import Option as CoreOption
|
||||
from .core import Parameter as CoreParameter
|
||||
|
||||
V = t.TypeVar("V")
|
||||
|
||||
|
||||
def _unpack_args(
|
||||
args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int]
|
||||
) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]:
|
||||
"""Given an iterable of arguments and an iterable of nargs specifications,
|
||||
it returns a tuple with all the unpacked arguments at the first index
|
||||
and all remaining arguments as the second.
|
||||
|
||||
The nargs specification is the number of arguments that should be consumed
|
||||
or `-1` to indicate that this position should eat up all the remainders.
|
||||
|
||||
Missing items are filled with ``UNSET``.
|
||||
"""
|
||||
args = deque(args)
|
||||
nargs_spec = deque(nargs_spec)
|
||||
rv: list[str | tuple[str | T_UNSET, ...] | T_UNSET] = []
|
||||
spos: int | None = None
|
||||
|
||||
def _fetch(c: deque[V]) -> V | T_UNSET:
|
||||
try:
|
||||
if spos is None:
|
||||
return c.popleft()
|
||||
else:
|
||||
return c.pop()
|
||||
except IndexError:
|
||||
return UNSET
|
||||
|
||||
while nargs_spec:
|
||||
nargs = _fetch(nargs_spec)
|
||||
|
||||
if nargs is None:
|
||||
continue
|
||||
|
||||
if nargs == 1:
|
||||
rv.append(_fetch(args)) # type: ignore[arg-type]
|
||||
elif nargs > 1:
|
||||
x = [_fetch(args) for _ in range(nargs)]
|
||||
|
||||
# If we're reversed, we're pulling in the arguments in reverse,
|
||||
# so we need to turn them around.
|
||||
if spos is not None:
|
||||
x.reverse()
|
||||
|
||||
rv.append(tuple(x))
|
||||
elif nargs < 0:
|
||||
if spos is not None:
|
||||
raise TypeError("Cannot have two nargs < 0")
|
||||
|
||||
spos = len(rv)
|
||||
rv.append(UNSET)
|
||||
|
||||
# spos is the position of the wildcard (star). If it's not `None`,
|
||||
# we fill it with the remainder.
|
||||
if spos is not None:
|
||||
rv[spos] = tuple(args)
|
||||
args = []
|
||||
rv[spos + 1 :] = reversed(rv[spos + 1 :])
|
||||
|
||||
return tuple(rv), list(args)
|
||||
|
||||
|
||||
def _split_opt(opt: str) -> tuple[str, str]:
|
||||
first = opt[:1]
|
||||
if first.isalnum():
|
||||
return "", opt
|
||||
if opt[1:2] == first:
|
||||
return opt[:2], opt[2:]
|
||||
return first, opt[1:]
|
||||
|
||||
|
||||
def _normalize_opt(opt: str, ctx: Context | None) -> str:
|
||||
if ctx is None or ctx.token_normalize_func is None:
|
||||
return opt
|
||||
prefix, opt = _split_opt(opt)
|
||||
return f"{prefix}{ctx.token_normalize_func(opt)}"
|
||||
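# --- Illustrative sketch (editor-added, not part of click/parser.py) ---
# What _split_opt() returns for common spellings; every result follows
# directly from the function above.
def _split_opt_examples() -> None:
    assert _split_opt("--file") == ("--", "file")
    assert _split_opt("-f") == ("-", "f")
    assert _split_opt("/debug") == ("/", "debug")  # slash-prefixed style
    assert _split_opt("file") == ("", "file")      # not an option at all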
|
||||
|
||||
class _Option:
|
||||
def __init__(
|
||||
self,
|
||||
obj: CoreOption,
|
||||
opts: cabc.Sequence[str],
|
||||
dest: str | None,
|
||||
action: str | None = None,
|
||||
nargs: int = 1,
|
||||
const: t.Any | None = None,
|
||||
):
|
||||
self._short_opts = []
|
||||
self._long_opts = []
|
||||
self.prefixes: set[str] = set()
|
||||
|
||||
for opt in opts:
|
||||
prefix, value = _split_opt(opt)
|
||||
if not prefix:
|
||||
raise ValueError(f"Invalid start character for option ({opt})")
|
||||
self.prefixes.add(prefix[0])
|
||||
if len(prefix) == 1 and len(value) == 1:
|
||||
self._short_opts.append(opt)
|
||||
else:
|
||||
self._long_opts.append(opt)
|
||||
self.prefixes.add(prefix)
|
||||
|
||||
if action is None:
|
||||
action = "store"
|
||||
|
||||
self.dest = dest
|
||||
self.action = action
|
||||
self.nargs = nargs
|
||||
self.const = const
|
||||
self.obj = obj
|
||||
|
||||
@property
|
||||
def takes_value(self) -> bool:
|
||||
return self.action in ("store", "append")
|
||||
|
||||
def process(self, value: t.Any, state: _ParsingState) -> None:
|
||||
if self.action == "store":
|
||||
state.opts[self.dest] = value # type: ignore
|
||||
elif self.action == "store_const":
|
||||
state.opts[self.dest] = self.const # type: ignore
|
||||
elif self.action == "append":
|
||||
state.opts.setdefault(self.dest, []).append(value) # type: ignore
|
||||
elif self.action == "append_const":
|
||||
state.opts.setdefault(self.dest, []).append(self.const) # type: ignore
|
||||
elif self.action == "count":
|
||||
state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore
|
||||
else:
|
||||
raise ValueError(f"unknown action '{self.action}'")
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class _Argument:
|
||||
def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1):
|
||||
self.dest = dest
|
||||
self.nargs = nargs
|
||||
self.obj = obj
|
||||
|
||||
def process(
|
||||
self,
|
||||
value: str | cabc.Sequence[str | None] | None | T_UNSET,
|
||||
state: _ParsingState,
|
||||
) -> None:
|
||||
if self.nargs > 1:
|
||||
assert isinstance(value, cabc.Sequence)
|
||||
holes = sum(1 for x in value if x is UNSET)
|
||||
if holes == len(value):
|
||||
value = UNSET
|
||||
elif holes != 0:
|
||||
raise BadArgumentUsage(
|
||||
_("Argument {name!r} takes {nargs} values.").format(
|
||||
name=self.dest, nargs=self.nargs
|
||||
)
|
||||
)
|
||||
|
||||
# We failed to collect any argument value so we consider the argument as unset.
|
||||
if value == ():
|
||||
value = UNSET
|
||||
|
||||
state.opts[self.dest] = value # type: ignore
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class _ParsingState:
|
||||
def __init__(self, rargs: list[str]) -> None:
|
||||
self.opts: dict[str, t.Any] = {}
|
||||
self.largs: list[str] = []
|
||||
self.rargs = rargs
|
||||
self.order: list[CoreParameter] = []
|
||||
|
||||
|
||||
class _OptionParser:
|
||||
"""The option parser is an internal class that is ultimately used to
|
||||
parse options and arguments. It's modelled after optparse and brings
|
||||
a similar but vastly simplified API. It should generally not be used
|
||||
directly as the high level Click classes wrap it for you.
|
||||
|
||||
It's not nearly as extensible as optparse or argparse as it does not
|
||||
implement features that are implemented on a higher level (such as
|
||||
types or defaults).
|
||||
|
||||
:param ctx: optionally the :class:`~click.Context` that this parser
|
||||
is associated with.
|
||||
|
||||
.. deprecated:: 8.2
|
||||
Will be removed in Click 9.0.
|
||||
"""
|
||||
|
||||
def __init__(self, ctx: Context | None = None) -> None:
|
||||
#: The :class:`~click.Context` for this parser. This might be
|
||||
#: `None` for some advanced use cases.
|
||||
self.ctx = ctx
|
||||
#: This controls how the parser deals with interspersed arguments.
|
||||
#: If this is set to `False`, the parser will stop on the first
|
||||
#: non-option. Click uses this to implement nested subcommands
|
||||
#: safely.
|
||||
self.allow_interspersed_args: bool = True
|
||||
#: This tells the parser how to deal with unknown options. By
|
||||
#: default it will error out (which is sensible), but there is a
|
||||
#: second mode where it will ignore it and continue processing
|
||||
#: after shifting all the unknown options into the resulting args.
|
||||
self.ignore_unknown_options: bool = False
|
||||
|
||||
if ctx is not None:
|
||||
self.allow_interspersed_args = ctx.allow_interspersed_args
|
||||
self.ignore_unknown_options = ctx.ignore_unknown_options
|
||||
|
||||
self._short_opt: dict[str, _Option] = {}
|
||||
self._long_opt: dict[str, _Option] = {}
|
||||
self._opt_prefixes = {"-", "--"}
|
||||
self._args: list[_Argument] = []
|
||||
|
||||
def add_option(
|
||||
self,
|
||||
obj: CoreOption,
|
||||
opts: cabc.Sequence[str],
|
||||
dest: str | None,
|
||||
action: str | None = None,
|
||||
nargs: int = 1,
|
||||
const: t.Any | None = None,
|
||||
) -> None:
|
||||
"""Adds a new option named `dest` to the parser. The destination
|
||||
is not inferred (unlike with optparse) and needs to be explicitly
|
||||
provided. Action can be any of ``store``, ``store_const``,
|
||||
``append``, ``append_const`` or ``count``.
|
||||
|
||||
The `obj` can be used to identify the option in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
opts = [_normalize_opt(opt, self.ctx) for opt in opts]
|
||||
option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const)
|
||||
self._opt_prefixes.update(option.prefixes)
|
||||
for opt in option._short_opts:
|
||||
self._short_opt[opt] = option
|
||||
for opt in option._long_opts:
|
||||
self._long_opt[opt] = option
|
||||
|
||||
def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None:
|
||||
"""Adds a positional argument named `dest` to the parser.
|
||||
|
||||
The `obj` can be used to identify the option in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
self._args.append(_Argument(obj, dest=dest, nargs=nargs))
|
||||
|
||||
def parse_args(
|
||||
self, args: list[str]
|
||||
) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]:
|
||||
"""Parses positional arguments and returns ``(values, args, order)``
|
||||
for the parsed options and arguments as well as the leftover
|
||||
arguments if there are any. The order is a list of objects as they
|
||||
appear on the command line. If arguments appear multiple times they
|
||||
will be memorized multiple times as well.
|
||||
"""
|
||||
state = _ParsingState(args)
|
||||
try:
|
||||
self._process_args_for_options(state)
|
||||
self._process_args_for_args(state)
|
||||
except UsageError:
|
||||
if self.ctx is None or not self.ctx.resilient_parsing:
|
||||
raise
|
||||
return state.opts, state.largs, state.order
|
||||
|
||||
def _process_args_for_args(self, state: _ParsingState) -> None:
|
||||
pargs, args = _unpack_args(
|
||||
state.largs + state.rargs, [x.nargs for x in self._args]
|
||||
)
|
||||
|
||||
for idx, arg in enumerate(self._args):
|
||||
arg.process(pargs[idx], state)
|
||||
|
||||
state.largs = args
|
||||
state.rargs = []
|
||||
|
||||
def _process_args_for_options(self, state: _ParsingState) -> None:
|
||||
while state.rargs:
|
||||
arg = state.rargs.pop(0)
|
||||
arglen = len(arg)
|
||||
# Double dashes always handled explicitly regardless of what
|
||||
# prefixes are valid.
|
||||
if arg == "--":
|
||||
return
|
||||
elif arg[:1] in self._opt_prefixes and arglen > 1:
|
||||
self._process_opts(arg, state)
|
||||
elif self.allow_interspersed_args:
|
||||
state.largs.append(arg)
|
||||
else:
|
||||
state.rargs.insert(0, arg)
|
||||
return
|
||||
|
||||
# Say this is the original argument list:
|
||||
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
|
||||
# ^
|
||||
# (we are about to process arg(i)).
|
||||
#
|
||||
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
|
||||
# [arg0, ..., arg(i-1)] (any options and their arguments will have
|
||||
# been removed from largs).
|
||||
#
|
||||
# The while loop will usually consume 1 or more arguments per pass.
|
||||
# If it consumes 1 (eg. arg is an option that takes no arguments),
|
||||
# then after _process_arg() is done the situation is:
|
||||
#
|
||||
# largs = subset of [arg0, ..., arg(i)]
|
||||
# rargs = [arg(i+1), ..., arg(N-1)]
|
||||
#
|
||||
# If allow_interspersed_args is false, largs will always be
|
||||
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
|
||||
# not a very interesting subset!
|
||||
|
||||
def _match_long_opt(
|
||||
self, opt: str, explicit_value: str | None, state: _ParsingState
|
||||
) -> None:
|
||||
if opt not in self._long_opt:
|
||||
from difflib import get_close_matches
|
||||
|
||||
possibilities = get_close_matches(opt, self._long_opt)
|
||||
raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
|
||||
|
||||
option = self._long_opt[opt]
|
||||
if option.takes_value:
|
||||
# At this point it's safe to modify rargs by injecting the
|
||||
# explicit value, because no exception is raised in this
|
||||
# branch. This means that the inserted value will be fully
|
||||
# consumed.
|
||||
if explicit_value is not None:
|
||||
state.rargs.insert(0, explicit_value)
|
||||
|
||||
value = self._get_value_from_state(opt, option, state)
|
||||
|
||||
elif explicit_value is not None:
|
||||
raise BadOptionUsage(
|
||||
opt, _("Option {name!r} does not take a value.").format(name=opt)
|
||||
)
|
||||
|
||||
else:
|
||||
value = UNSET
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
def _match_short_opt(self, arg: str, state: _ParsingState) -> None:
|
||||
stop = False
|
||||
i = 1
|
||||
prefix = arg[0]
|
||||
unknown_options = []
|
||||
|
||||
for ch in arg[1:]:
|
||||
opt = _normalize_opt(f"{prefix}{ch}", self.ctx)
|
||||
option = self._short_opt.get(opt)
|
||||
i += 1
|
||||
|
||||
if not option:
|
||||
if self.ignore_unknown_options:
|
||||
unknown_options.append(ch)
|
||||
continue
|
||||
raise NoSuchOption(opt, ctx=self.ctx)
|
||||
if option.takes_value:
|
||||
# Any characters left in arg? Pretend they're the
|
||||
# next arg, and stop consuming characters of arg.
|
||||
if i < len(arg):
|
||||
state.rargs.insert(0, arg[i:])
|
||||
stop = True
|
||||
|
||||
value = self._get_value_from_state(opt, option, state)
|
||||
|
||||
else:
|
||||
value = UNSET
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
if stop:
|
||||
break
|
||||
|
||||
# If we got any unknown options we recombine the string of the
|
||||
# remaining options and re-attach the prefix, then report that
|
||||
# to the state as a new larg. This way basic option bundling
|
||||
# still works while the unknown options are ignored.
|
||||
if self.ignore_unknown_options and unknown_options:
|
||||
state.largs.append(f"{prefix}{''.join(unknown_options)}")
|
||||
|
||||
def _get_value_from_state(
|
||||
self, option_name: str, option: _Option, state: _ParsingState
|
||||
) -> str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE:
|
||||
nargs = option.nargs
|
||||
|
||||
value: str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE
|
||||
|
||||
if len(state.rargs) < nargs:
|
||||
if option.obj._flag_needs_value:
|
||||
# Option allows omitting the value.
|
||||
value = FLAG_NEEDS_VALUE
|
||||
else:
|
||||
raise BadOptionUsage(
|
||||
option_name,
|
||||
ngettext(
|
||||
"Option {name!r} requires an argument.",
|
||||
"Option {name!r} requires {nargs} arguments.",
|
||||
nargs,
|
||||
).format(name=option_name, nargs=nargs),
|
||||
)
|
||||
elif nargs == 1:
|
||||
next_rarg = state.rargs[0]
|
||||
|
||||
if (
|
||||
option.obj._flag_needs_value
|
||||
and isinstance(next_rarg, str)
|
||||
and next_rarg[:1] in self._opt_prefixes
|
||||
and len(next_rarg) > 1
|
||||
):
|
||||
# The next arg looks like the start of an option, don't
|
||||
# use it as the value if omitting the value is allowed.
|
||||
value = FLAG_NEEDS_VALUE
|
||||
else:
|
||||
value = state.rargs.pop(0)
|
||||
else:
|
||||
value = tuple(state.rargs[:nargs])
|
||||
del state.rargs[:nargs]
|
||||
|
||||
return value
|
||||
|
||||
def _process_opts(self, arg: str, state: _ParsingState) -> None:
|
||||
explicit_value = None
|
||||
# Long option handling happens in two parts. The first part is
|
||||
# supporting explicitly attached values. In any case, we will try
|
||||
# to long match the option first.
|
||||
if "=" in arg:
|
||||
long_opt, explicit_value = arg.split("=", 1)
|
||||
else:
|
||||
long_opt = arg
|
||||
norm_long_opt = _normalize_opt(long_opt, self.ctx)
|
||||
|
||||
# At this point we will match the (assumed) long option through
|
||||
# the long option matching code. Note that this allows options
|
||||
# like "-foo" to be matched as long options.
|
||||
try:
|
||||
self._match_long_opt(norm_long_opt, explicit_value, state)
|
||||
except NoSuchOption:
|
||||
# At this point the long option matching failed, and we need
|
||||
# to try with short options. However there is a special rule
|
||||
# which says, that if we have a two character options prefix
|
||||
# (applies to "--foo" for instance), we do not dispatch to the
|
||||
# short option code and will instead raise the no option
|
||||
# error.
|
||||
if arg[:2] not in self._opt_prefixes:
|
||||
self._match_short_opt(arg, state)
|
||||
return
|
||||
|
||||
if not self.ignore_unknown_options:
|
||||
raise
|
||||
|
||||
state.largs.append(arg)
|
||||
|
||||
|
||||
def __getattr__(name: str) -> object:
|
||||
import warnings
|
||||
|
||||
if name in {
|
||||
"OptionParser",
|
||||
"Argument",
|
||||
"Option",
|
||||
"split_opt",
|
||||
"normalize_opt",
|
||||
"ParsingState",
|
||||
}:
|
||||
warnings.warn(
|
||||
f"'parser.{name}' is deprecated and will be removed in Click 9.0."
|
||||
" The old parser is available in 'optparse'.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return globals()[f"_{name}"]
|
||||
|
||||
if name == "split_arg_string":
|
||||
from .shell_completion import split_arg_string
|
||||
|
||||
warnings.warn(
|
||||
"Importing 'parser.split_arg_string' is deprecated, it will only be"
|
||||
" available in 'shell_completion' in Click 9.0.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return split_arg_string
|
||||
|
||||
raise AttributeError(name)
|
||||
0
tapdown/lib/python3.11/site-packages/click/py.typed
Normal file
667
tapdown/lib/python3.11/site-packages/click/shell_completion.py
Normal file
@@ -0,0 +1,667 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import os
|
||||
import re
|
||||
import typing as t
|
||||
from gettext import gettext as _
|
||||
|
||||
from .core import Argument
|
||||
from .core import Command
|
||||
from .core import Context
|
||||
from .core import Group
|
||||
from .core import Option
|
||||
from .core import Parameter
|
||||
from .core import ParameterSource
|
||||
from .utils import echo
|
||||
|
||||
|
||||
def shell_complete(
|
||||
cli: Command,
|
||||
ctx_args: cabc.MutableMapping[str, t.Any],
|
||||
prog_name: str,
|
||||
complete_var: str,
|
||||
instruction: str,
|
||||
) -> int:
|
||||
"""Perform shell completion for the given CLI program.
|
||||
|
||||
:param cli: Command being called.
|
||||
:param ctx_args: Extra arguments to pass to
|
||||
``cli.make_context``.
|
||||
:param prog_name: Name of the executable in the shell.
|
||||
:param complete_var: Name of the environment variable that holds
|
||||
the completion instruction.
|
||||
:param instruction: Value of ``complete_var`` with the completion
|
||||
instruction and shell, in the form ``instruction_shell``.
|
||||
:return: Status code to exit with.
|
||||
"""
|
||||
shell, _, instruction = instruction.partition("_")
|
||||
comp_cls = get_completion_class(shell)
|
||||
|
||||
if comp_cls is None:
|
||||
return 1
|
||||
|
||||
comp = comp_cls(cli, ctx_args, prog_name, complete_var)
|
||||
|
||||
if instruction == "source":
|
||||
echo(comp.source())
|
||||
return 0
|
||||
|
||||
if instruction == "complete":
|
||||
echo(comp.complete())
|
||||
return 0
|
||||
|
||||
return 1
|
||||
|
||||
|
||||
class CompletionItem:
|
||||
"""Represents a completion value and metadata about the value. The
|
||||
default metadata is ``type`` to indicate special shell handling,
|
||||
and ``help`` if a shell supports showing a help string next to the
|
||||
value.
|
||||
|
||||
Arbitrary parameters can be passed when creating the object, and
|
||||
accessed using ``item.attr``. If an attribute wasn't passed,
|
||||
accessing it returns ``None``.
|
||||
|
||||
:param value: The completion suggestion.
|
||||
:param type: Tells the shell script to provide special completion
|
||||
support for the type. Click uses ``"dir"`` and ``"file"``.
|
||||
:param help: String shown next to the value if supported.
|
||||
:param kwargs: Arbitrary metadata. The built-in implementations
|
||||
don't use this, but custom type completions paired with custom
|
||||
shell support could use it.
|
||||
"""
|
||||
|
||||
__slots__ = ("value", "type", "help", "_info")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
value: t.Any,
|
||||
type: str = "plain",
|
||||
help: str | None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> None:
|
||||
self.value: t.Any = value
|
||||
self.type: str = type
|
||||
self.help: str | None = help
|
||||
self._info = kwargs
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return self._info.get(name)
|
||||
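# --- Illustrative sketch (editor-added, not part of click/shell_completion.py) ---
# A parameter-level completion callback that returns CompletionItem objects.
# The environment names are invented; the (ctx, param, incomplete) shape is
# the one Click passes to callables supplied via ``shell_complete=``.
def _complete_env(ctx, param, incomplete):
    environments = {
        "dev": "local sandbox",
        "staging": "pre-production",
        "prod": "production",
    }
    return [
        CompletionItem(name, help=description)
        for name, description in environments.items()
        if name.startswith(incomplete)
    ]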
|
||||
|
||||
# Only Bash >= 4.4 has the nosort option.
|
||||
_SOURCE_BASH = """\
|
||||
%(complete_func)s() {
|
||||
local IFS=$'\\n'
|
||||
local response
|
||||
|
||||
response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \
|
||||
%(complete_var)s=bash_complete $1)
|
||||
|
||||
for completion in $response; do
|
||||
IFS=',' read type value <<< "$completion"
|
||||
|
||||
if [[ $type == 'dir' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o dirnames
|
||||
elif [[ $type == 'file' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o default
|
||||
elif [[ $type == 'plain' ]]; then
|
||||
COMPREPLY+=($value)
|
||||
fi
|
||||
done
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
%(complete_func)s_setup() {
|
||||
complete -o nosort -F %(complete_func)s %(prog_name)s
|
||||
}
|
||||
|
||||
%(complete_func)s_setup;
|
||||
"""
|
||||
|
||||
# See ZshComplete.format_completion below, and issue #2703, before
|
||||
# changing this script.
|
||||
#
|
||||
# (TL;DR: _describe is picky about the format, but this Zsh script snippet
|
||||
# is already widely deployed. So freeze this script, and use clever-ish
|
||||
# handling of colons in ZshComplete.format_completion.)
|
||||
_SOURCE_ZSH = """\
|
||||
#compdef %(prog_name)s
|
||||
|
||||
%(complete_func)s() {
|
||||
local -a completions
|
||||
local -a completions_with_descriptions
|
||||
local -a response
|
||||
(( ! $+commands[%(prog_name)s] )) && return 1
|
||||
|
||||
response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \
|
||||
%(complete_var)s=zsh_complete %(prog_name)s)}")
|
||||
|
||||
for type key descr in ${response}; do
|
||||
if [[ "$type" == "plain" ]]; then
|
||||
if [[ "$descr" == "_" ]]; then
|
||||
completions+=("$key")
|
||||
else
|
||||
completions_with_descriptions+=("$key":"$descr")
|
||||
fi
|
||||
elif [[ "$type" == "dir" ]]; then
|
||||
_path_files -/
|
||||
elif [[ "$type" == "file" ]]; then
|
||||
_path_files -f
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$completions_with_descriptions" ]; then
|
||||
_describe -V unsorted completions_with_descriptions -U
|
||||
fi
|
||||
|
||||
if [ -n "$completions" ]; then
|
||||
compadd -U -V unsorted -a completions
|
||||
fi
|
||||
}
|
||||
|
||||
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
|
||||
# autoload from fpath, call function directly
|
||||
%(complete_func)s "$@"
|
||||
else
|
||||
# eval/source/. command, register function for later
|
||||
compdef %(complete_func)s %(prog_name)s
|
||||
fi
|
||||
"""
|
||||
|
||||
_SOURCE_FISH = """\
|
||||
function %(complete_func)s;
|
||||
set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \
|
||||
COMP_CWORD=(commandline -t) %(prog_name)s);
|
||||
|
||||
for completion in $response;
|
||||
set -l metadata (string split "," $completion);
|
||||
|
||||
if test $metadata[1] = "dir";
|
||||
__fish_complete_directories $metadata[2];
|
||||
else if test $metadata[1] = "file";
|
||||
__fish_complete_path $metadata[2];
|
||||
else if test $metadata[1] = "plain";
|
||||
echo $metadata[2];
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
|
||||
complete --no-files --command %(prog_name)s --arguments \
|
||||
"(%(complete_func)s)";
|
||||
"""
|
||||
|
||||
|
||||
class ShellComplete:
|
||||
"""Base class for providing shell completion support. A subclass for
|
||||
a given shell will override attributes and methods to implement the
|
||||
completion instructions (``source`` and ``complete``).
|
||||
|
||||
:param cli: Command being called.
|
||||
:param prog_name: Name of the executable in the shell.
|
||||
:param complete_var: Name of the environment variable that holds
|
||||
the completion instruction.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
"""
|
||||
|
||||
name: t.ClassVar[str]
|
||||
"""Name to register the shell as with :func:`add_completion_class`.
|
||||
This is used in completion instructions (``{name}_source`` and
|
||||
``{name}_complete``).
|
||||
"""
|
||||
|
||||
source_template: t.ClassVar[str]
|
||||
"""Completion script template formatted by :meth:`source`. This must
|
||||
be provided by subclasses.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
cli: Command,
|
||||
ctx_args: cabc.MutableMapping[str, t.Any],
|
||||
prog_name: str,
|
||||
complete_var: str,
|
||||
) -> None:
|
||||
self.cli = cli
|
||||
self.ctx_args = ctx_args
|
||||
self.prog_name = prog_name
|
||||
self.complete_var = complete_var
|
||||
|
||||
@property
|
||||
def func_name(self) -> str:
|
||||
"""The name of the shell function defined by the completion
|
||||
script.
|
||||
"""
|
||||
safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII)
|
||||
return f"_{safe_name}_completion"
|
||||
|
||||
def source_vars(self) -> dict[str, t.Any]:
|
||||
"""Vars for formatting :attr:`source_template`.
|
||||
|
||||
By default this provides ``complete_func``, ``complete_var``,
|
||||
and ``prog_name``.
|
||||
"""
|
||||
return {
|
||||
"complete_func": self.func_name,
|
||||
"complete_var": self.complete_var,
|
||||
"prog_name": self.prog_name,
|
||||
}
|
||||
|
||||
def source(self) -> str:
|
||||
"""Produce the shell script that defines the completion
|
||||
function. By default this ``%``-style formats
|
||||
:attr:`source_template` with the dict returned by
|
||||
:meth:`source_vars`.
|
||||
"""
|
||||
return self.source_template % self.source_vars()
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
"""Use the env vars defined by the shell script to return a
|
||||
tuple of ``args, incomplete``. This must be implemented by
|
||||
subclasses.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]:
|
||||
"""Determine the context and last complete command or parameter
|
||||
from the complete args. Call that object's ``shell_complete``
|
||||
method to get the completions for the incomplete value.
|
||||
|
||||
:param args: List of complete args before the incomplete value.
|
||||
:param incomplete: Value being completed. May be empty.
|
||||
"""
|
||||
ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args)
|
||||
obj, incomplete = _resolve_incomplete(ctx, args, incomplete)
|
||||
return obj.shell_complete(ctx, incomplete)
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
"""Format a completion item into the form recognized by the
|
||||
shell script. This must be implemented by subclasses.
|
||||
|
||||
:param item: Completion item to format.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def complete(self) -> str:
|
||||
"""Produce the completion data to send back to the shell.
|
||||
|
||||
By default this calls :meth:`get_completion_args`, gets the
|
||||
completions, then calls :meth:`format_completion` for each
|
||||
completion.
|
||||
"""
|
||||
args, incomplete = self.get_completion_args()
|
||||
completions = self.get_completions(args, incomplete)
|
||||
out = [self.format_completion(item) for item in completions]
|
||||
return "\n".join(out)
|
||||
|
||||
|
||||
class BashComplete(ShellComplete):
|
||||
"""Shell completion for Bash."""
|
||||
|
||||
name = "bash"
|
||||
source_template = _SOURCE_BASH
|
||||
|
||||
@staticmethod
|
||||
def _check_version() -> None:
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
bash_exe = shutil.which("bash")
|
||||
|
||||
if bash_exe is None:
|
||||
match = None
|
||||
else:
|
||||
output = subprocess.run(
|
||||
[bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'],
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode())
|
||||
|
||||
if match is not None:
|
||||
major, minor = match.groups()
|
||||
|
||||
if major < "4" or major == "4" and minor < "4":
|
||||
echo(
|
||||
_(
|
||||
"Shell completion is not supported for Bash"
|
||||
" versions older than 4.4."
|
||||
),
|
||||
err=True,
|
||||
)
|
||||
else:
|
||||
echo(
|
||||
_("Couldn't detect Bash version, shell completion is not supported."),
|
||||
err=True,
|
||||
)
|
||||
|
||||
def source(self) -> str:
|
||||
self._check_version()
|
||||
return super().source()
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
cwords = split_arg_string(os.environ["COMP_WORDS"])
|
||||
cword = int(os.environ["COMP_CWORD"])
|
||||
args = cwords[1:cword]
|
||||
|
||||
try:
|
||||
incomplete = cwords[cword]
|
||||
except IndexError:
|
||||
incomplete = ""
|
||||
|
||||
return args, incomplete
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
return f"{item.type},{item.value}"
|
||||
|
||||
|
||||
class ZshComplete(ShellComplete):
|
||||
"""Shell completion for Zsh."""
|
||||
|
||||
name = "zsh"
|
||||
source_template = _SOURCE_ZSH
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
cwords = split_arg_string(os.environ["COMP_WORDS"])
|
||||
cword = int(os.environ["COMP_CWORD"])
|
||||
args = cwords[1:cword]
|
||||
|
||||
try:
|
||||
incomplete = cwords[cword]
|
||||
except IndexError:
|
||||
incomplete = ""
|
||||
|
||||
return args, incomplete
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
help_ = item.help or "_"
|
||||
# The zsh completion script uses `_describe` on items with help
|
||||
# texts (which splits the item help from the item value at the
|
||||
# first unescaped colon) and `compadd` on items without help
|
||||
# text (which uses the item value as-is and does not support
|
||||
# colon escaping). So escape colons in the item value if and
|
||||
# only if the item help is not the sentinel "_" value, as used
|
||||
# by the completion script.
|
||||
#
|
||||
# (The zsh completion script is potentially widely deployed, and
|
||||
# thus harder to fix than this method.)
|
||||
#
|
||||
# See issue #1812 and issue #2703 for further context.
|
||||
value = item.value.replace(":", r"\:") if help_ != "_" else item.value
|
||||
return f"{item.type}\n{value}\n{help_}"
|
||||
|
||||
|
||||
class FishComplete(ShellComplete):
|
||||
"""Shell completion for Fish."""
|
||||
|
||||
name = "fish"
|
||||
source_template = _SOURCE_FISH
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
cwords = split_arg_string(os.environ["COMP_WORDS"])
|
||||
incomplete = os.environ["COMP_CWORD"]
|
||||
if incomplete:
|
||||
incomplete = split_arg_string(incomplete)[0]
|
||||
args = cwords[1:]
|
||||
|
||||
# Fish stores the partial word in both COMP_WORDS and
|
||||
# COMP_CWORD, remove it from complete args.
|
||||
if incomplete and args and args[-1] == incomplete:
|
||||
args.pop()
|
||||
|
||||
return args, incomplete
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
if item.help:
|
||||
return f"{item.type},{item.value}\t{item.help}"
|
||||
|
||||
return f"{item.type},{item.value}"
|
||||
|
||||
|
||||
ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]")
|
||||
|
||||
|
||||
_available_shells: dict[str, type[ShellComplete]] = {
|
||||
"bash": BashComplete,
|
||||
"fish": FishComplete,
|
||||
"zsh": ZshComplete,
|
||||
}
|
||||
|
||||
|
||||
def add_completion_class(
|
||||
cls: ShellCompleteType, name: str | None = None
|
||||
) -> ShellCompleteType:
|
||||
"""Register a :class:`ShellComplete` subclass under the given name.
|
||||
The name will be provided by the completion instruction environment
|
||||
variable during completion.
|
||||
|
||||
:param cls: The completion class that will handle completion for the
|
||||
shell.
|
||||
:param name: Name to register the class under. Defaults to the
|
||||
class's ``name`` attribute.
|
||||
"""
|
||||
if name is None:
|
||||
name = cls.name
|
||||
|
||||
_available_shells[name] = cls
|
||||
|
||||
return cls
|
||||
|
||||
|
||||
def get_completion_class(shell: str) -> type[ShellComplete] | None:
|
||||
"""Look up a registered :class:`ShellComplete` subclass by the name
|
||||
provided by the completion instruction environment variable. If the
|
||||
name isn't registered, returns ``None``.
|
||||
|
||||
:param shell: Name the class is registered under.
|
||||
"""
|
||||
return _available_shells.get(shell)
|
||||
|
||||
|
||||
def split_arg_string(string: str) -> list[str]:
|
||||
"""Split an argument string as with :func:`shlex.split`, but don't
|
||||
fail if the string is incomplete. Ignores a missing closing quote or
|
||||
incomplete escape sequence and uses the partial token as-is.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
split_arg_string("example 'my file")
|
||||
["example", "my file"]
|
||||
|
||||
split_arg_string("example my\\")
|
||||
["example", "my"]
|
||||
|
||||
:param string: String to split.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
Moved to ``shell_completion`` from ``parser``.
|
||||
"""
|
||||
import shlex
|
||||
|
||||
lex = shlex.shlex(string, posix=True)
|
||||
lex.whitespace_split = True
|
||||
lex.commenters = ""
|
||||
out = []
|
||||
|
||||
try:
|
||||
for token in lex:
|
||||
out.append(token)
|
||||
except ValueError:
|
||||
# Raised when end-of-string is reached in an invalid state. Use
|
||||
# the partial token as-is. The quote or escape character is in
|
||||
# lex.state, not lex.token.
|
||||
out.append(lex.token)
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool:
|
||||
"""Determine if the given parameter is an argument that can still
|
||||
accept values.
|
||||
|
||||
:param ctx: Invocation context for the command represented by the
|
||||
parsed complete args.
|
||||
:param param: Argument object being checked.
|
||||
"""
|
||||
if not isinstance(param, Argument):
|
||||
return False
|
||||
|
||||
assert param.name is not None
|
||||
# Will be None if expose_value is False.
|
||||
value = ctx.params.get(param.name)
|
||||
return (
|
||||
param.nargs == -1
|
||||
or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE
|
||||
or (
|
||||
param.nargs > 1
|
||||
and isinstance(value, (tuple, list))
|
||||
and len(value) < param.nargs
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _start_of_option(ctx: Context, value: str) -> bool:
|
||||
"""Check if the value looks like the start of an option."""
|
||||
if not value:
|
||||
return False
|
||||
|
||||
c = value[0]
|
||||
return c in ctx._opt_prefixes
|
||||
|
||||
|
||||
def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool:
|
||||
"""Determine if the given parameter is an option that needs a value.
|
||||
|
||||
:param args: List of complete args before the incomplete value.
|
||||
:param param: Option object being checked.
|
||||
"""
|
||||
if not isinstance(param, Option):
|
||||
return False
|
||||
|
||||
if param.is_flag or param.count:
|
||||
return False
|
||||
|
||||
last_option = None
|
||||
|
||||
for index, arg in enumerate(reversed(args)):
|
||||
if index + 1 > param.nargs:
|
||||
break
|
||||
|
||||
if _start_of_option(ctx, arg):
|
||||
last_option = arg
|
||||
break
|
||||
|
||||
return last_option is not None and last_option in param.opts
|
||||
|
||||
|
||||
def _resolve_context(
|
||||
cli: Command,
|
||||
ctx_args: cabc.MutableMapping[str, t.Any],
|
||||
prog_name: str,
|
||||
args: list[str],
|
||||
) -> Context:
|
||||
"""Produce the context hierarchy starting with the command and
|
||||
traversing the complete arguments. This only follows the commands,
|
||||
it doesn't trigger input prompts or callbacks.
|
||||
|
||||
:param cli: Command being called.
|
||||
:param prog_name: Name of the executable in the shell.
|
||||
:param args: List of complete args before the incomplete value.
|
||||
"""
|
||||
ctx_args["resilient_parsing"] = True
|
||||
with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx:
|
||||
args = ctx._protected_args + ctx.args
|
||||
|
||||
while args:
|
||||
command = ctx.command
|
||||
|
||||
if isinstance(command, Group):
|
||||
if not command.chain:
|
||||
name, cmd, args = command.resolve_command(ctx, args)
|
||||
|
||||
if cmd is None:
|
||||
return ctx
|
||||
|
||||
with cmd.make_context(
|
||||
name, args, parent=ctx, resilient_parsing=True
|
||||
) as sub_ctx:
|
||||
ctx = sub_ctx
|
||||
args = ctx._protected_args + ctx.args
|
||||
else:
|
||||
sub_ctx = ctx
|
||||
|
||||
while args:
|
||||
name, cmd, args = command.resolve_command(ctx, args)
|
||||
|
||||
if cmd is None:
|
||||
return ctx
|
||||
|
||||
with cmd.make_context(
|
||||
name,
|
||||
args,
|
||||
parent=ctx,
|
||||
allow_extra_args=True,
|
||||
allow_interspersed_args=False,
|
||||
resilient_parsing=True,
|
||||
) as sub_sub_ctx:
|
||||
sub_ctx = sub_sub_ctx
|
||||
args = sub_ctx.args
|
||||
|
||||
ctx = sub_ctx
|
||||
args = [*sub_ctx._protected_args, *sub_ctx.args]
|
||||
else:
|
||||
break
|
||||
|
||||
return ctx
|
||||
|
||||
|
||||
def _resolve_incomplete(
|
||||
ctx: Context, args: list[str], incomplete: str
|
||||
) -> tuple[Command | Parameter, str]:
|
||||
"""Find the Click object that will handle the completion of the
|
||||
incomplete value. Return the object and the incomplete value.
|
||||
|
||||
:param ctx: Invocation context for the command represented by
|
||||
the parsed complete args.
|
||||
:param args: List of complete args before the incomplete value.
|
||||
:param incomplete: Value being completed. May be empty.
|
||||
"""
|
||||
# Different shells treat an "=" between a long option name and
|
||||
# value differently. Might keep the value joined, return the "="
|
||||
# as a separate item, or return the split name and value. Always
|
||||
# split and discard the "=" to make completion easier.
|
||||
if incomplete == "=":
|
||||
incomplete = ""
|
||||
elif "=" in incomplete and _start_of_option(ctx, incomplete):
|
||||
name, _, incomplete = incomplete.partition("=")
|
||||
args.append(name)
|
||||
|
||||
# The "--" marker tells Click to stop treating values as options
|
||||
# even if they start with the option character. If it hasn't been
|
||||
# given and the incomplete arg looks like an option, the current
|
||||
# command will provide option name completions.
|
||||
if "--" not in args and _start_of_option(ctx, incomplete):
|
||||
return ctx.command, incomplete
|
||||
|
||||
params = ctx.command.get_params(ctx)
|
||||
|
||||
# If the last complete arg is an option name with an incomplete
|
||||
# value, the option will provide value completions.
|
||||
for param in params:
|
||||
if _is_incomplete_option(ctx, args, param):
|
||||
return param, incomplete
|
||||
|
||||
# It's not an option name or value. The first argument without a
|
||||
# parsed value will provide value completions.
|
||||
for param in params:
|
||||
if _is_incomplete_argument(ctx, param):
|
||||
return param, incomplete
|
||||
|
||||
# There were no unparsed arguments, the command may be a group that
|
||||
# will provide command name completions.
|
||||
return ctx.command, incomplete
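
# A sketch of how the helpers above fit together for a command line such as
# ``greet --name <TAB>``; ``greet_cmd`` is a hypothetical Command supplied by
# the caller.
def _sketch_completion_flow(greet_cmd: Command) -> None:
    # Build the context chain for the complete args (no prompts, no callbacks).
    ctx = _resolve_context(greet_cmd, {}, "greet", ["--name"])
    # Ask which object should complete the (empty) incomplete word: here it is
    # the ``--name`` Option, whose shell_complete() then supplies value hints.
    obj, incomplete = _resolve_incomplete(ctx, ["--name"], "")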
|
||||
877
tapdown/lib/python3.11/site-packages/click/termui.py
Normal file
@@ -0,0 +1,877 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import inspect
|
||||
import io
|
||||
import itertools
|
||||
import sys
|
||||
import typing as t
|
||||
from contextlib import AbstractContextManager
|
||||
from gettext import gettext as _
|
||||
|
||||
from ._compat import isatty
|
||||
from ._compat import strip_ansi
|
||||
from .exceptions import Abort
|
||||
from .exceptions import UsageError
|
||||
from .globals import resolve_color_default
|
||||
from .types import Choice
|
||||
from .types import convert_type
|
||||
from .types import ParamType
|
||||
from .utils import echo
|
||||
from .utils import LazyFile
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from ._termui_impl import ProgressBar
|
||||
|
||||
V = t.TypeVar("V")
|
||||
|
||||
# The prompt functions to use. The doc tools currently override these
|
||||
# functions to customize how they work.
|
||||
visible_prompt_func: t.Callable[[str], str] = input
|
||||
|
||||
_ansi_colors = {
|
||||
"black": 30,
|
||||
"red": 31,
|
||||
"green": 32,
|
||||
"yellow": 33,
|
||||
"blue": 34,
|
||||
"magenta": 35,
|
||||
"cyan": 36,
|
||||
"white": 37,
|
||||
"reset": 39,
|
||||
"bright_black": 90,
|
||||
"bright_red": 91,
|
||||
"bright_green": 92,
|
||||
"bright_yellow": 93,
|
||||
"bright_blue": 94,
|
||||
"bright_magenta": 95,
|
||||
"bright_cyan": 96,
|
||||
"bright_white": 97,
|
||||
}
|
||||
_ansi_reset_all = "\033[0m"
|
||||
|
||||
|
||||
def hidden_prompt_func(prompt: str) -> str:
|
||||
import getpass
|
||||
|
||||
return getpass.getpass(prompt)
|
||||
|
||||
|
||||
def _build_prompt(
|
||||
text: str,
|
||||
suffix: str,
|
||||
show_default: bool = False,
|
||||
default: t.Any | None = None,
|
||||
show_choices: bool = True,
|
||||
type: ParamType | None = None,
|
||||
) -> str:
|
||||
prompt = text
|
||||
if type is not None and show_choices and isinstance(type, Choice):
|
||||
prompt += f" ({', '.join(map(str, type.choices))})"
|
||||
if default is not None and show_default:
|
||||
prompt = f"{prompt} [{_format_default(default)}]"
|
||||
return f"{prompt}{suffix}"
|
||||
|
||||
|
||||
def _format_default(default: t.Any) -> t.Any:
|
||||
if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
|
||||
return default.name
|
||||
|
||||
return default
|
||||
|
||||
|
||||
def prompt(
|
||||
text: str,
|
||||
default: t.Any | None = None,
|
||||
hide_input: bool = False,
|
||||
confirmation_prompt: bool | str = False,
|
||||
type: ParamType | t.Any | None = None,
|
||||
value_proc: t.Callable[[str], t.Any] | None = None,
|
||||
prompt_suffix: str = ": ",
|
||||
show_default: bool = True,
|
||||
err: bool = False,
|
||||
show_choices: bool = True,
|
||||
) -> t.Any:
|
||||
"""Prompts a user for input. This is a convenience function that can
|
||||
be used to prompt a user for input later.
|
||||
|
||||
If the user aborts the input by sending an interrupt signal, this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
:param text: the text to show for the prompt.
|
||||
:param default: the default value to use if no input happens. If this
|
||||
is not given it will prompt until it's aborted.
|
||||
:param hide_input: if this is set to true then the input value will
|
||||
be hidden.
|
||||
:param confirmation_prompt: Prompt a second time to confirm the
|
||||
value. Can be set to a string instead of ``True`` to customize
|
||||
the message.
|
||||
:param type: the type to use to check the value against.
|
||||
:param value_proc: if this parameter is provided it's a function that
|
||||
is invoked instead of the type conversion to
|
||||
convert a value.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
:param show_choices: Show or hide choices if the passed type is a Choice.
|
||||
For example if type is a Choice of either day or week,
|
||||
show_choices is true and text is "Group by" then the
|
||||
prompt will be "Group by (day, week): ".
|
||||
|
||||
.. versionadded:: 8.0
|
||||
``confirmation_prompt`` can be a custom string.
|
||||
|
||||
.. versionadded:: 7.0
|
||||
Added the ``show_choices`` parameter.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
Added unicode support for cmd.exe on Windows.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
"""
|
||||
|
||||
def prompt_func(text: str) -> str:
|
||||
f = hidden_prompt_func if hide_input else visible_prompt_func
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(text.rstrip(" "), nl=False, err=err)
|
||||
# Echo a space to stdout to work around an issue where
|
||||
# readline causes backspace to clear the whole line.
|
||||
return f(" ")
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
# getpass doesn't print a newline if the user aborts input with ^C.
|
||||
# Allegedly this behavior is inherited from getpass(3).
|
||||
# A doc bug has been filed at https://bugs.python.org/issue24711
|
||||
if hide_input:
|
||||
echo(None, err=err)
|
||||
raise Abort() from None
|
||||
|
||||
if value_proc is None:
|
||||
value_proc = convert_type(type, default)
|
||||
|
||||
prompt = _build_prompt(
|
||||
text, prompt_suffix, show_default, default, show_choices, type
|
||||
)
|
||||
|
||||
if confirmation_prompt:
|
||||
if confirmation_prompt is True:
|
||||
confirmation_prompt = _("Repeat for confirmation")
|
||||
|
||||
confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix)
|
||||
|
||||
while True:
|
||||
while True:
|
||||
value = prompt_func(prompt)
|
||||
if value:
|
||||
break
|
||||
elif default is not None:
|
||||
value = default
|
||||
break
|
||||
try:
|
||||
result = value_proc(value)
|
||||
except UsageError as e:
|
||||
if hide_input:
|
||||
echo(_("Error: The value you entered was invalid."), err=err)
|
||||
else:
|
||||
echo(_("Error: {e.message}").format(e=e), err=err)
|
||||
continue
|
||||
if not confirmation_prompt:
|
||||
return result
|
||||
while True:
|
||||
value2 = prompt_func(confirmation_prompt)
|
||||
is_empty = not value and not value2
|
||||
if value2 or is_empty:
|
||||
break
|
||||
if value == value2:
|
||||
return result
|
||||
echo(_("Error: The two entered values do not match."), err=err)
|
||||
|
||||
|
||||
def confirm(
|
||||
text: str,
|
||||
default: bool | None = False,
|
||||
abort: bool = False,
|
||||
prompt_suffix: str = ": ",
|
||||
show_default: bool = True,
|
||||
err: bool = False,
|
||||
) -> bool:
|
||||
"""Prompts for confirmation (yes/no question).
|
||||
|
||||
If the user aborts the input by sending an interrupt signal, this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
:param text: the question to ask.
|
||||
:param default: The default value to use when no input is given. If
|
||||
``None``, repeat until input is given.
|
||||
:param abort: if this is set to `True` a negative answer aborts the
|
||||
execution by raising :exc:`Abort`.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Repeat until input is given if ``default`` is ``None``.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the ``err`` parameter.
|
||||
"""
|
||||
prompt = _build_prompt(
|
||||
text,
|
||||
prompt_suffix,
|
||||
show_default,
|
||||
"y/n" if default is None else ("Y/n" if default else "y/N"),
|
||||
)
|
||||
|
||||
while True:
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(prompt.rstrip(" "), nl=False, err=err)
|
||||
# Echo a space to stdout to work around an issue where
|
||||
# readline causes backspace to clear the whole line.
|
||||
value = visible_prompt_func(" ").lower().strip()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
raise Abort() from None
|
||||
if value in ("y", "yes"):
|
||||
rv = True
|
||||
elif value in ("n", "no"):
|
||||
rv = False
|
||||
elif default is not None and value == "":
|
||||
rv = default
|
||||
else:
|
||||
echo(_("Error: invalid input"), err=err)
|
||||
continue
|
||||
break
|
||||
if abort and not rv:
|
||||
raise Abort()
|
||||
return rv
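
# A short usage sketch with illustrative prompt texts; ``abort=True`` turns a
# negative answer into an :exc:`Abort` so the caller does not need an ``if``.
def _sketch_confirm_usage() -> None:
    if confirm("Continue?", default=True):  # renders "Continue? [Y/n]: "
        echo("continuing")
    confirm("Really delete everything?", abort=True)  # raises Abort on "n"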
|
||||
|
||||
|
||||
def echo_via_pager(
|
||||
text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str,
|
||||
color: bool | None = None,
|
||||
) -> None:
|
||||
"""This function takes a text and shows it via an environment specific
|
||||
pager on stdout.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Added the `color` flag.
|
||||
|
||||
:param text_or_generator: the text to page, or alternatively, a
|
||||
generator emitting the text to page.
|
||||
:param color: controls if the pager supports ANSI colors or not. The
|
||||
default is autodetection.
|
||||
"""
|
||||
color = resolve_color_default(color)
|
||||
|
||||
if inspect.isgeneratorfunction(text_or_generator):
|
||||
i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)()
|
||||
elif isinstance(text_or_generator, str):
|
||||
i = [text_or_generator]
|
||||
else:
|
||||
i = iter(t.cast("cabc.Iterable[str]", text_or_generator))
|
||||
|
||||
# convert every element of i to a text type if necessary
|
||||
text_generator = (el if isinstance(el, str) else str(el) for el in i)
|
||||
|
||||
from ._termui_impl import pager
|
||||
|
||||
return pager(itertools.chain(text_generator, "\n"), color)
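
# A short usage sketch: a plain string is paged as-is, while passing a
# generator function keeps memory flat for very large output.
def _sketch_pager_usage() -> None:
    echo_via_pager("\n".join(f"line {n}" for n in range(200)))

    def _lines() -> cabc.Iterator[str]:
        for n in range(1_000_000):
            yield f"line {n}\n"

    echo_via_pager(_lines)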
|
||||
|
||||
|
||||
@t.overload
|
||||
def progressbar(
|
||||
*,
|
||||
length: int,
|
||||
label: str | None = None,
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = "-",
|
||||
bar_template: str = "%(label)s [%(bar)s] %(info)s",
|
||||
info_sep: str = " ",
|
||||
width: int = 36,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
) -> ProgressBar[int]: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def progressbar(
|
||||
iterable: cabc.Iterable[V] | None = None,
|
||||
length: int | None = None,
|
||||
label: str | None = None,
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
item_show_func: t.Callable[[V | None], str | None] | None = None,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = "-",
|
||||
bar_template: str = "%(label)s [%(bar)s] %(info)s",
|
||||
info_sep: str = " ",
|
||||
width: int = 36,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
) -> ProgressBar[V]: ...
|
||||
|
||||
|
||||
def progressbar(
|
||||
iterable: cabc.Iterable[V] | None = None,
|
||||
length: int | None = None,
|
||||
label: str | None = None,
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
item_show_func: t.Callable[[V | None], str | None] | None = None,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = "-",
|
||||
bar_template: str = "%(label)s [%(bar)s] %(info)s",
|
||||
info_sep: str = " ",
|
||||
width: int = 36,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
) -> ProgressBar[V]:
|
||||
"""This function creates an iterable context manager that can be used
|
||||
to iterate over something while showing a progress bar. It will
|
||||
either iterate over the `iterable` or `length` items (that are counted
|
||||
up). While iteration happens, this function will print a rendered
|
||||
progress bar to the given `file` (defaults to stdout) and will attempt
|
||||
to calculate remaining time and more. By default, this progress bar
|
||||
will not be rendered if the file is not a terminal.
|
||||
|
||||
The context manager creates the progress bar. When the context
|
||||
manager is entered the progress bar is already created. With every
|
||||
iteration over the progress bar, the iterable passed to the bar is
|
||||
advanced and the bar is updated. When the context manager exits,
|
||||
a newline is printed and the progress bar is finalized on screen.
|
||||
|
||||
Note: The progress bar is currently designed for use cases where the
|
||||
total progress can be expected to take at least several seconds.
|
||||
Because of this, the ProgressBar class object won't display
|
||||
progress that is considered too fast, and progress where the time
|
||||
between steps is less than a second.
|
||||
|
||||
No other output must be written to the same stream while the bar is
active, or the progress bar will be unintentionally destroyed.
|
||||
|
||||
Example usage::
|
||||
|
||||
with progressbar(items) as bar:
|
||||
for item in bar:
|
||||
do_something_with(item)
|
||||
|
||||
Alternatively, if no iterable is specified, one can manually update the
|
||||
progress bar through the `update()` method instead of directly
|
||||
iterating over the progress bar. The update method accepts the number
|
||||
of steps to increment the bar with::
|
||||
|
||||
with progressbar(length=chunks.total_bytes) as bar:
|
||||
for chunk in chunks:
|
||||
process_chunk(chunk)
|
||||
bar.update(chunks.bytes)
|
||||
|
||||
The ``update()`` method also takes an optional value specifying the
|
||||
``current_item`` at the new position. This is useful when used
|
||||
together with ``item_show_func`` to customize the output for each
|
||||
manual step::
|
||||
|
||||
with click.progressbar(
|
||||
length=total_size,
|
||||
label='Unzipping archive',
|
||||
item_show_func=lambda a: a.filename
|
||||
) as bar:
|
||||
for archive in zip_file:
|
||||
archive.extract()
|
||||
bar.update(archive.size, archive)
|
||||
|
||||
:param iterable: an iterable to iterate over. If not provided the length
|
||||
is required.
|
||||
:param length: the number of items to iterate over. By default the
|
||||
progressbar will attempt to ask the iterator about its
|
||||
length, which might or might not work. If an iterable is
|
||||
also provided this parameter can be used to override the
|
||||
length. If an iterable is not provided the progress bar
|
||||
will iterate over a range of that length.
|
||||
:param label: the label to show next to the progress bar.
|
||||
:param hidden: hide the progressbar. Defaults to ``False``. When no tty is
|
||||
detected, it will only print the progressbar label. Setting this to
|
||||
``True`` also disables that.
|
||||
:param show_eta: enables or disables the estimated time display. This is
|
||||
automatically disabled if the length cannot be
|
||||
determined.
|
||||
:param show_percent: enables or disables the percentage display. The
|
||||
default is `True` if the iterable has a length or
|
||||
`False` if not.
|
||||
:param show_pos: enables or disables the absolute position display. The
|
||||
default is `False`.
|
||||
:param item_show_func: A function called with the current item which
|
||||
can return a string to show next to the progress bar. If the
|
||||
function returns ``None`` nothing is shown. The current item can
|
||||
be ``None``, such as when entering and exiting the bar.
|
||||
:param fill_char: the character to use to show the filled part of the
|
||||
progress bar.
|
||||
:param empty_char: the character to use to show the non-filled part of
|
||||
the progress bar.
|
||||
:param bar_template: the format string to use as template for the bar.
|
||||
The parameters in it are ``label`` for the label,
|
||||
``bar`` for the progress bar and ``info`` for the
|
||||
info section.
|
||||
:param info_sep: the separator between multiple info items (eta etc.)
|
||||
:param width: the width of the progress bar in characters, 0 means full
|
||||
terminal width
|
||||
:param file: The file to write to. If this is not a terminal then
|
||||
only the label is printed.
|
||||
:param color: controls if the terminal supports ANSI colors or not. The
|
||||
default is autodetection. This is only needed if ANSI
|
||||
codes are included anywhere in the progress bar output
|
||||
which is not the case by default.
|
||||
:param update_min_steps: Render only when this many updates have
|
||||
completed. This allows tuning for very fast iterators.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
The ``hidden`` argument.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Output is shown even if execution time is less than 0.5 seconds.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
``item_show_func`` shows the current item, not the previous one.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Labels are echoed if the output is not a TTY. Reverts a change
|
||||
in 7.0 that removed all output.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
The ``update_min_steps`` parameter.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
The ``color`` parameter and ``update`` method.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
from ._termui_impl import ProgressBar
|
||||
|
||||
color = resolve_color_default(color)
|
||||
return ProgressBar(
|
||||
iterable=iterable,
|
||||
length=length,
|
||||
hidden=hidden,
|
||||
show_eta=show_eta,
|
||||
show_percent=show_percent,
|
||||
show_pos=show_pos,
|
||||
item_show_func=item_show_func,
|
||||
fill_char=fill_char,
|
||||
empty_char=empty_char,
|
||||
bar_template=bar_template,
|
||||
info_sep=info_sep,
|
||||
file=file,
|
||||
label=label,
|
||||
width=width,
|
||||
color=color,
|
||||
update_min_steps=update_min_steps,
|
||||
)
|
||||
|
||||
|
||||
def clear() -> None:
|
||||
"""Clears the terminal screen. This will have the effect of clearing
|
||||
the whole visible space of the terminal and moving the cursor to the
|
||||
top left. This does not do anything if not connected to a terminal.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if not isatty(sys.stdout):
|
||||
return
|
||||
|
||||
# ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor
|
||||
echo("\033[2J\033[1;1H", nl=False)
|
||||
|
||||
|
||||
def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str:
|
||||
if isinstance(color, int):
|
||||
return f"{38 + offset};5;{color:d}"
|
||||
|
||||
if isinstance(color, (tuple, list)):
|
||||
r, g, b = color
|
||||
return f"{38 + offset};2;{r:d};{g:d};{b:d}"
|
||||
|
||||
return str(_ansi_colors[color] + offset)
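
# For reference, the three accepted color forms map to these SGR fragments
# (``offset=10`` shifts foreground codes to the matching background codes):
#
#   _interpret_color("red")        -> "31"             (named ANSI color)
#   _interpret_color(196)          -> "38;5;196"       (8-bit / 256-color palette)
#   _interpret_color((255, 0, 0))  -> "38;2;255;0;0"   (24-bit RGB)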
|
||||
|
||||
|
||||
def style(
|
||||
text: t.Any,
|
||||
fg: int | tuple[int, int, int] | str | None = None,
|
||||
bg: int | tuple[int, int, int] | str | None = None,
|
||||
bold: bool | None = None,
|
||||
dim: bool | None = None,
|
||||
underline: bool | None = None,
|
||||
overline: bool | None = None,
|
||||
italic: bool | None = None,
|
||||
blink: bool | None = None,
|
||||
reverse: bool | None = None,
|
||||
strikethrough: bool | None = None,
|
||||
reset: bool = True,
|
||||
) -> str:
|
||||
"""Styles a text with ANSI styles and returns the new string. By
|
||||
default the styling is self contained which means that at the end
|
||||
of the string a reset code is issued. This can be prevented by
|
||||
passing ``reset=False``.
|
||||
|
||||
Examples::
|
||||
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
click.echo(click.style('ATTENTION!', blink=True))
|
||||
click.echo(click.style('Some things', reverse=True, fg='cyan'))
|
||||
click.echo(click.style('More colors', fg=(255, 12, 128), bg=117))
|
||||
|
||||
Supported color names:
|
||||
|
||||
* ``black`` (might be a gray)
|
||||
* ``red``
|
||||
* ``green``
|
||||
* ``yellow`` (might be an orange)
|
||||
* ``blue``
|
||||
* ``magenta``
|
||||
* ``cyan``
|
||||
* ``white`` (might be light gray)
|
||||
* ``bright_black``
|
||||
* ``bright_red``
|
||||
* ``bright_green``
|
||||
* ``bright_yellow``
|
||||
* ``bright_blue``
|
||||
* ``bright_magenta``
|
||||
* ``bright_cyan``
|
||||
* ``bright_white``
|
||||
* ``reset`` (reset the color code only)
|
||||
|
||||
If the terminal supports it, color may also be specified as:
|
||||
|
||||
- An integer in the interval [0, 255]. The terminal must support
|
||||
8-bit/256-color mode.
|
||||
- An RGB tuple of three integers in [0, 255]. The terminal must
|
||||
support 24-bit/true-color mode.
|
||||
|
||||
See https://en.wikipedia.org/wiki/ANSI_color and
|
||||
https://gist.github.com/XVilka/8346728 for more information.
|
||||
|
||||
:param text: the string to style with ansi codes.
|
||||
:param fg: if provided this will become the foreground color.
|
||||
:param bg: if provided this will become the background color.
|
||||
:param bold: if provided this will enable or disable bold mode.
|
||||
:param dim: if provided this will enable or disable dim mode. This is
|
||||
badly supported.
|
||||
:param underline: if provided this will enable or disable underline.
|
||||
:param overline: if provided this will enable or disable overline.
|
||||
:param italic: if provided this will enable or disable italic.
|
||||
:param blink: if provided this will enable or disable blinking.
|
||||
:param reverse: if provided this will enable or disable inverse
|
||||
rendering (foreground becomes background and the
|
||||
other way round).
|
||||
:param strikethrough: if provided this will enable or disable
|
||||
striking through text.
|
||||
:param reset: by default a reset-all code is added at the end of the
|
||||
string which means that styles do not carry over. This
|
||||
can be disabled to compose styles.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
A non-string ``message`` is converted to a string.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added support for 256 and RGB color codes.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added the ``strikethrough``, ``italic``, and ``overline``
|
||||
parameters.
|
||||
|
||||
.. versionchanged:: 7.0
|
||||
Added support for bright colors.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if not isinstance(text, str):
|
||||
text = str(text)
|
||||
|
||||
bits = []
|
||||
|
||||
if fg:
|
||||
try:
|
||||
bits.append(f"\033[{_interpret_color(fg)}m")
|
||||
except KeyError:
|
||||
raise TypeError(f"Unknown color {fg!r}") from None
|
||||
|
||||
if bg:
|
||||
try:
|
||||
bits.append(f"\033[{_interpret_color(bg, 10)}m")
|
||||
except KeyError:
|
||||
raise TypeError(f"Unknown color {bg!r}") from None
|
||||
|
||||
if bold is not None:
|
||||
bits.append(f"\033[{1 if bold else 22}m")
|
||||
if dim is not None:
|
||||
bits.append(f"\033[{2 if dim else 22}m")
|
||||
if underline is not None:
|
||||
bits.append(f"\033[{4 if underline else 24}m")
|
||||
if overline is not None:
|
||||
bits.append(f"\033[{53 if overline else 55}m")
|
||||
if italic is not None:
|
||||
bits.append(f"\033[{3 if italic else 23}m")
|
||||
if blink is not None:
|
||||
bits.append(f"\033[{5 if blink else 25}m")
|
||||
if reverse is not None:
|
||||
bits.append(f"\033[{7 if reverse else 27}m")
|
||||
if strikethrough is not None:
|
||||
bits.append(f"\033[{9 if strikethrough else 29}m")
|
||||
bits.append(text)
|
||||
if reset:
|
||||
bits.append(_ansi_reset_all)
|
||||
return "".join(bits)
|
||||
|
||||
|
||||
def unstyle(text: str) -> str:
|
||||
"""Removes ANSI styling information from a string. Usually it's not
|
||||
necessary to use this function as Click's echo function will
|
||||
automatically remove styling if necessary.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param text: the text to remove style information from.
|
||||
"""
|
||||
return strip_ansi(text)
|
||||
|
||||
|
||||
def secho(
|
||||
message: t.Any | None = None,
|
||||
file: t.IO[t.AnyStr] | None = None,
|
||||
nl: bool = True,
|
||||
err: bool = False,
|
||||
color: bool | None = None,
|
||||
**styles: t.Any,
|
||||
) -> None:
|
||||
"""This function combines :func:`echo` and :func:`style` into one
|
||||
call. As such the following two calls are the same::
|
||||
|
||||
click.secho('Hello World!', fg='green')
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
|
||||
All keyword arguments are forwarded to the underlying functions
|
||||
depending on which one they go with.
|
||||
|
||||
Non-string types will be converted to :class:`str`. However,
|
||||
:class:`bytes` are passed directly to :meth:`echo` without applying
|
||||
style. If you want to style bytes that represent text, call
|
||||
:meth:`bytes.decode` first.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
A non-string ``message`` is converted to a string. Bytes are
|
||||
passed through without style applied.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if message is not None and not isinstance(message, (bytes, bytearray)):
|
||||
message = style(message, **styles)
|
||||
|
||||
return echo(message, file=file, nl=nl, err=err, color=color)
|
||||
|
||||
|
||||
@t.overload
|
||||
def edit(
|
||||
text: bytes | bytearray,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = False,
|
||||
extension: str = ".txt",
|
||||
) -> bytes | None: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def edit(
|
||||
text: str,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
) -> str | None: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def edit(
|
||||
text: None = None,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
filename: str | cabc.Iterable[str] | None = None,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
def edit(
|
||||
text: str | bytes | bytearray | None = None,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
filename: str | cabc.Iterable[str] | None = None,
|
||||
) -> str | bytes | bytearray | None:
|
||||
r"""Edits the given text in the defined editor. If an editor is given
|
||||
(should be the full path to the executable but the regular operating
|
||||
system search path is used for finding the executable) it overrides
|
||||
the detected editor. Optionally, some environment variables can be
|
||||
used. If the editor is closed without changes, `None` is returned. In
|
||||
case a file is edited directly the return value is always `None` and
|
||||
`require_save` and `extension` are ignored.
|
||||
|
||||
If the editor cannot be opened a :exc:`UsageError` is raised.
|
||||
|
||||
Note for Windows: to simplify cross-platform usage, the newlines are
|
||||
automatically converted from POSIX to Windows and vice versa. As such,
|
||||
the message here will have ``\n`` as newline markers.
|
||||
|
||||
:param text: the text to edit.
|
||||
:param editor: optionally the editor to use. Defaults to automatic
|
||||
detection.
|
||||
:param env: environment variables to forward to the editor.
|
||||
:param require_save: if this is true, then not saving in the editor
|
||||
will make the return value become `None`.
|
||||
:param extension: the extension to tell the editor about. This defaults
|
||||
to `.txt` but changing this might change syntax
|
||||
highlighting.
|
||||
:param filename: if provided it will edit this file instead of the
|
||||
provided text contents. It will not use a temporary
|
||||
file as an indirection in that case. If the editor supports
|
||||
editing multiple files at once, a sequence of files may be
|
||||
passed as well. Invoke `click.edit` once per file instead
|
||||
if multiple files cannot be managed at once or editing the
|
||||
files serially is desired.
|
||||
|
||||
.. versionchanged:: 8.2.0
|
||||
``filename`` now accepts any ``Iterable[str]`` in addition to a ``str``
|
||||
if the ``editor`` supports editing multiple files at once.
|
||||
|
||||
"""
|
||||
from ._termui_impl import Editor
|
||||
|
||||
ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension)
|
||||
|
||||
if filename is None:
|
||||
return ed.edit(text)
|
||||
|
||||
if isinstance(filename, str):
|
||||
filename = (filename,)
|
||||
|
||||
ed.edit_files(filenames=filename)
|
||||
return None
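
# A short usage sketch; the file path below is purely illustrative.
def _sketch_edit_usage() -> None:
    new_text = edit("# Write your notes here\n", extension=".md")
    if new_text is None:
        echo("editor closed without saving")
    edit(filename="/tmp/scratch.txt")  # edits the file in place, returns None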
|
||||
|
||||
|
||||
def launch(url: str, wait: bool = False, locate: bool = False) -> int:
|
||||
"""This function launches the given URL (or filename) in the default
|
||||
viewer application for this file type. If this is an executable, it
|
||||
might launch the executable in a new session. The return value is
|
||||
the exit code of the launched application. Usually, ``0`` indicates
|
||||
success.
|
||||
|
||||
Examples::
|
||||
|
||||
click.launch('https://click.palletsprojects.com/')
|
||||
click.launch('/my/downloaded/file', locate=True)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param url: URL or filename of the thing to launch.
|
||||
:param wait: Wait for the program to exit before returning. This
|
||||
only works if the launched program blocks. In particular,
|
||||
``xdg-open`` on Linux does not block.
|
||||
:param locate: if this is set to `True` then instead of launching the
|
||||
application associated with the URL it will attempt to
|
||||
launch a file manager with the file located. This
|
||||
might have weird effects if the URL does not point to
|
||||
the filesystem.
|
||||
"""
|
||||
from ._termui_impl import open_url
|
||||
|
||||
return open_url(url, wait=wait, locate=locate)
|
||||
|
||||
|
||||
# If this is provided, getchar() calls into this instead. This is used
|
||||
# for unittesting purposes.
|
||||
_getchar: t.Callable[[bool], str] | None = None
|
||||
|
||||
|
||||
def getchar(echo: bool = False) -> str:
|
||||
"""Fetches a single character from the terminal and returns it. This
|
||||
will always return a unicode character and under certain rare
|
||||
circumstances this might return more than one character. The
|
||||
situations in which more than one character is returned are when for
|
||||
whatever reason multiple characters end up in the terminal buffer or
|
||||
standard input was not actually a terminal.
|
||||
|
||||
Note that this will always read from the terminal, even if something
|
||||
is piped into the standard input.
|
||||
|
||||
Note for Windows: in rare cases when typing non-ASCII characters, this
|
||||
function might wait for a second character and then return both at once.
|
||||
This is because certain Unicode characters look like special-key markers.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param echo: if set to `True`, the character read will also show up on
|
||||
the terminal. The default is to not show it.
|
||||
"""
|
||||
global _getchar
|
||||
|
||||
if _getchar is None:
|
||||
from ._termui_impl import getchar as f
|
||||
|
||||
_getchar = f
|
||||
|
||||
return _getchar(echo)
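
# A short usage sketch: read one key without waiting for Enter, abort on "q".
def _sketch_getchar_usage() -> None:
    echo("Press q to quit, any other key to continue...")
    if getchar() == "q":
        raise Abort()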
|
||||
|
||||
|
||||
def raw_terminal() -> AbstractContextManager[int]:
|
||||
from ._termui_impl import raw_terminal as f
|
||||
|
||||
return f()
|
||||
|
||||
|
||||
def pause(info: str | None = None, err: bool = False) -> None:
|
||||
"""This command stops execution and waits for the user to press any
|
||||
key to continue. This is similar to the Windows batch "pause"
|
||||
command. If the program is not run through a terminal, this command
|
||||
will instead do nothing.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param info: The message to print before pausing. Defaults to
|
||||
``"Press any key to continue..."``.
|
||||
:param err: if set to true the message goes to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
"""
|
||||
if not isatty(sys.stdin) or not isatty(sys.stdout):
|
||||
return
|
||||
|
||||
if info is None:
|
||||
info = _("Press any key to continue...")
|
||||
|
||||
try:
|
||||
if info:
|
||||
echo(info, nl=False, err=err)
|
||||
try:
|
||||
getchar()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
pass
|
||||
finally:
|
||||
if info:
|
||||
echo(err=err)
|
||||
577
tapdown/lib/python3.11/site-packages/click/testing.py
Normal file
@@ -0,0 +1,577 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import contextlib
|
||||
import io
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
import tempfile
|
||||
import typing as t
|
||||
from types import TracebackType
|
||||
|
||||
from . import _compat
|
||||
from . import formatting
|
||||
from . import termui
|
||||
from . import utils
|
||||
from ._compat import _find_binary_reader
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from _typeshed import ReadableBuffer
|
||||
|
||||
from .core import Command
|
||||
|
||||
|
||||
class EchoingStdin:
|
||||
def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
|
||||
self._input = input
|
||||
self._output = output
|
||||
self._paused = False
|
||||
|
||||
def __getattr__(self, x: str) -> t.Any:
|
||||
return getattr(self._input, x)
|
||||
|
||||
def _echo(self, rv: bytes) -> bytes:
|
||||
if not self._paused:
|
||||
self._output.write(rv)
|
||||
|
||||
return rv
|
||||
|
||||
def read(self, n: int = -1) -> bytes:
|
||||
return self._echo(self._input.read(n))
|
||||
|
||||
def read1(self, n: int = -1) -> bytes:
|
||||
return self._echo(self._input.read1(n)) # type: ignore
|
||||
|
||||
def readline(self, n: int = -1) -> bytes:
|
||||
return self._echo(self._input.readline(n))
|
||||
|
||||
def readlines(self) -> list[bytes]:
|
||||
return [self._echo(x) for x in self._input.readlines()]
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[bytes]:
|
||||
return iter(self._echo(x) for x in self._input)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._input)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]:
|
||||
if stream is None:
|
||||
yield
|
||||
else:
|
||||
stream._paused = True
|
||||
yield
|
||||
stream._paused = False
|
||||
|
||||
|
||||
class BytesIOCopy(io.BytesIO):
|
||||
"""Patch ``io.BytesIO`` to let the written stream be copied to another.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
"""
|
||||
|
||||
def __init__(self, copy_to: io.BytesIO) -> None:
|
||||
super().__init__()
|
||||
self.copy_to = copy_to
|
||||
|
||||
def flush(self) -> None:
|
||||
super().flush()
|
||||
self.copy_to.flush()
|
||||
|
||||
def write(self, b: ReadableBuffer) -> int:
|
||||
self.copy_to.write(b)
|
||||
return super().write(b)
|
||||
|
||||
|
||||
class StreamMixer:
|
||||
"""Mixes `<stdout>` and `<stderr>` streams.
|
||||
|
||||
The result is available in the ``output`` attribute.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.output: io.BytesIO = io.BytesIO()
|
||||
self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output)
|
||||
self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output)
|
||||
|
||||
def __del__(self) -> None:
|
||||
"""
|
||||
Guarantee that embedded file-like objects are closed in a
|
||||
predictable order, protecting against races between
|
||||
self.output being closed and other streams being flushed on close
|
||||
|
||||
.. versionadded:: 8.2.2
|
||||
"""
|
||||
self.stderr.close()
|
||||
self.stdout.close()
|
||||
self.output.close()
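
# A minimal sketch of the mixing behaviour: anything written to either stream
# also lands in ``output`` in write order, which is what ``Result.output``
# later exposes to tests.
def _sketch_stream_mixer() -> None:
    mixer = StreamMixer()
    mixer.stdout.write(b"out ")
    mixer.stderr.write(b"err ")
    mixer.stdout.write(b"out again")
    assert mixer.output.getvalue() == b"out err out again"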
|
||||
|
||||
|
||||
class _NamedTextIOWrapper(io.TextIOWrapper):
|
||||
def __init__(
|
||||
self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
|
||||
) -> None:
|
||||
super().__init__(buffer, **kwargs)
|
||||
self._name = name
|
||||
self._mode = mode
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def mode(self) -> str:
|
||||
return self._mode
|
||||
|
||||
|
||||
def make_input_stream(
|
||||
input: str | bytes | t.IO[t.Any] | None, charset: str
|
||||
) -> t.BinaryIO:
|
||||
# Is already an input stream.
|
||||
if hasattr(input, "read"):
|
||||
rv = _find_binary_reader(t.cast("t.IO[t.Any]", input))
|
||||
|
||||
if rv is not None:
|
||||
return rv
|
||||
|
||||
raise TypeError("Could not find binary reader for input stream.")
|
||||
|
||||
if input is None:
|
||||
input = b""
|
||||
elif isinstance(input, str):
|
||||
input = input.encode(charset)
|
||||
|
||||
return io.BytesIO(input)
|
||||
|
||||
|
||||
class Result:
|
||||
"""Holds the captured result of an invoked CLI script.
|
||||
|
||||
:param runner: The runner that created the result
|
||||
:param stdout_bytes: The standard output as bytes.
|
||||
:param stderr_bytes: The standard error as bytes.
|
||||
:param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the
|
||||
user would see it in their terminal.
|
||||
:param return_value: The value returned from the invoked command.
|
||||
:param exit_code: The exit code as integer.
|
||||
:param exception: The exception that happened if one did.
|
||||
:param exc_info: Exception information (exception type, exception instance,
|
||||
traceback type).
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
``stderr_bytes`` no longer optional, ``output_bytes`` introduced and
|
||||
``mix_stderr`` has been removed.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
Added ``return_value``.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
runner: CliRunner,
|
||||
stdout_bytes: bytes,
|
||||
stderr_bytes: bytes,
|
||||
output_bytes: bytes,
|
||||
return_value: t.Any,
|
||||
exit_code: int,
|
||||
exception: BaseException | None,
|
||||
exc_info: tuple[type[BaseException], BaseException, TracebackType]
|
||||
| None = None,
|
||||
):
|
||||
self.runner = runner
|
||||
self.stdout_bytes = stdout_bytes
|
||||
self.stderr_bytes = stderr_bytes
|
||||
self.output_bytes = output_bytes
|
||||
self.return_value = return_value
|
||||
self.exit_code = exit_code
|
||||
self.exception = exception
|
||||
self.exc_info = exc_info
|
||||
|
||||
@property
|
||||
def output(self) -> str:
|
||||
"""The terminal output as unicode string, as the user would see it.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
No longer a proxy for ``self.stdout``. Now has its own independent stream
|
||||
that is mixing `<stdout>` and `<stderr>`, in the order they were written.
|
||||
"""
|
||||
return self.output_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
@property
|
||||
def stdout(self) -> str:
|
||||
"""The standard output as unicode string."""
|
||||
return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
@property
|
||||
def stderr(self) -> str:
|
||||
"""The standard error as unicode string.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
No longer raises an exception; always returns the `<stderr>` string.
|
||||
"""
|
||||
return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
exc_str = repr(self.exception) if self.exception else "okay"
|
||||
return f"<{type(self).__name__} {exc_str}>"
|
||||
|
||||
|
||||
class CliRunner:
|
||||
"""The CLI runner provides functionality to invoke a Click command line
|
||||
script for unittesting purposes in an isolated environment. This only
|
||||
works in single-threaded systems without any concurrency as it changes the
|
||||
global interpreter state.
|
||||
|
||||
:param charset: the character set for the input and output data.
|
||||
:param env: a dictionary with environment variables for overriding.
|
||||
:param echo_stdin: if this is set to `True`, then reading from `<stdin>` writes
|
||||
to `<stdout>`. This is useful for showing examples in
|
||||
some circumstances. Note that regular prompts
|
||||
will automatically echo the input.
|
||||
:param catch_exceptions: Whether to catch any exceptions other than
|
||||
``SystemExit`` when running :meth:`~CliRunner.invoke`.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
Added the ``catch_exceptions`` parameter.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
``mix_stderr`` parameter has been removed.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
charset: str = "utf-8",
|
||||
env: cabc.Mapping[str, str | None] | None = None,
|
||||
echo_stdin: bool = False,
|
||||
catch_exceptions: bool = True,
|
||||
) -> None:
|
||||
self.charset = charset
|
||||
self.env: cabc.Mapping[str, str | None] = env or {}
|
||||
self.echo_stdin = echo_stdin
|
||||
self.catch_exceptions = catch_exceptions
|
||||
|
||||
def get_default_prog_name(self, cli: Command) -> str:
|
||||
"""Given a command object it will return the default program name
|
||||
for it. The default is the `name` attribute or ``"root"`` if not
|
||||
set.
|
||||
"""
|
||||
return cli.name or "root"
|
||||
|
||||
def make_env(
|
||||
self, overrides: cabc.Mapping[str, str | None] | None = None
|
||||
) -> cabc.Mapping[str, str | None]:
|
||||
"""Returns the environment overrides for invoking a script."""
|
||||
rv = dict(self.env)
|
||||
if overrides:
|
||||
rv.update(overrides)
|
||||
return rv
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolation(
|
||||
self,
|
||||
input: str | bytes | t.IO[t.Any] | None = None,
|
||||
env: cabc.Mapping[str, str | None] | None = None,
|
||||
color: bool = False,
|
||||
) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]:
|
||||
"""A context manager that sets up the isolation for invoking of a
|
||||
command line tool. This sets up `<stdin>` with the given input data
|
||||
and `os.environ` with the overrides from the given dictionary.
|
||||
This also rebinds some internals in Click to be mocked (like the
|
||||
prompt functionality).
|
||||
|
||||
This is automatically done in the :meth:`invoke` method.
|
||||
|
||||
:param input: the input stream to put into `sys.stdin`.
|
||||
:param env: the environment overrides as dictionary.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
An additional output stream is returned, which is a mix of
|
||||
`<stdout>` and `<stderr>` streams.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
Always returns the `<stderr>` stream.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
`<stderr>` is opened with ``errors="backslashreplace"``
|
||||
instead of the default ``"strict"``.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the ``color`` parameter.
|
||||
"""
|
||||
bytes_input = make_input_stream(input, self.charset)
|
||||
echo_input = None
|
||||
|
||||
old_stdin = sys.stdin
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
old_forced_width = formatting.FORCED_WIDTH
|
||||
formatting.FORCED_WIDTH = 80
|
||||
|
||||
env = self.make_env(env)
|
||||
|
||||
stream_mixer = StreamMixer()
|
||||
|
||||
if self.echo_stdin:
|
||||
bytes_input = echo_input = t.cast(
|
||||
t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout)
|
||||
)
|
||||
|
||||
sys.stdin = text_input = _NamedTextIOWrapper(
|
||||
bytes_input, encoding=self.charset, name="<stdin>", mode="r"
|
||||
)
|
||||
|
||||
if self.echo_stdin:
|
||||
# Force unbuffered reads, otherwise TextIOWrapper reads a
|
||||
# large chunk which is echoed early.
|
||||
text_input._CHUNK_SIZE = 1 # type: ignore
|
||||
|
||||
sys.stdout = _NamedTextIOWrapper(
|
||||
stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w"
|
||||
)
|
||||
|
||||
sys.stderr = _NamedTextIOWrapper(
|
||||
stream_mixer.stderr,
|
||||
encoding=self.charset,
|
||||
name="<stderr>",
|
||||
mode="w",
|
||||
errors="backslashreplace",
|
||||
)
|
||||
|
||||
@_pause_echo(echo_input) # type: ignore
|
||||
def visible_input(prompt: str | None = None) -> str:
|
||||
sys.stdout.write(prompt or "")
|
||||
try:
|
||||
val = next(text_input).rstrip("\r\n")
|
||||
except StopIteration as e:
|
||||
raise EOFError() from e
|
||||
sys.stdout.write(f"{val}\n")
|
||||
sys.stdout.flush()
|
||||
return val
|
||||
|
||||
@_pause_echo(echo_input) # type: ignore
|
||||
def hidden_input(prompt: str | None = None) -> str:
|
||||
sys.stdout.write(f"{prompt or ''}\n")
|
||||
sys.stdout.flush()
|
||||
try:
|
||||
return next(text_input).rstrip("\r\n")
|
||||
except StopIteration as e:
|
||||
raise EOFError() from e
|
||||
|
||||
@_pause_echo(echo_input) # type: ignore
|
||||
def _getchar(echo: bool) -> str:
|
||||
char = sys.stdin.read(1)
|
||||
|
||||
if echo:
|
||||
sys.stdout.write(char)
|
||||
|
||||
sys.stdout.flush()
|
||||
return char
|
||||
|
||||
default_color = color
|
||||
|
||||
def should_strip_ansi(
|
||||
stream: t.IO[t.Any] | None = None, color: bool | None = None
|
||||
) -> bool:
|
||||
if color is None:
|
||||
return not default_color
|
||||
return not color
|
||||
|
||||
old_visible_prompt_func = termui.visible_prompt_func
|
||||
old_hidden_prompt_func = termui.hidden_prompt_func
|
||||
old__getchar_func = termui._getchar
|
||||
old_should_strip_ansi = utils.should_strip_ansi # type: ignore
|
||||
old__compat_should_strip_ansi = _compat.should_strip_ansi
|
||||
termui.visible_prompt_func = visible_input
|
||||
termui.hidden_prompt_func = hidden_input
|
||||
termui._getchar = _getchar
|
||||
utils.should_strip_ansi = should_strip_ansi # type: ignore
|
||||
_compat.should_strip_ansi = should_strip_ansi
|
||||
|
||||
old_env = {}
|
||||
try:
|
||||
for key, value in env.items():
|
||||
old_env[key] = os.environ.get(key)
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output)
|
||||
finally:
|
||||
for key, value in old_env.items():
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
sys.stdin = old_stdin
|
||||
termui.visible_prompt_func = old_visible_prompt_func
|
||||
termui.hidden_prompt_func = old_hidden_prompt_func
|
||||
termui._getchar = old__getchar_func
|
||||
utils.should_strip_ansi = old_should_strip_ansi # type: ignore
|
||||
_compat.should_strip_ansi = old__compat_should_strip_ansi
|
||||
formatting.FORCED_WIDTH = old_forced_width
|
||||
|
||||
def invoke(
|
||||
self,
|
||||
cli: Command,
|
||||
args: str | cabc.Sequence[str] | None = None,
|
||||
input: str | bytes | t.IO[t.Any] | None = None,
|
||||
env: cabc.Mapping[str, str | None] | None = None,
|
||||
catch_exceptions: bool | None = None,
|
||||
color: bool = False,
|
||||
**extra: t.Any,
|
||||
) -> Result:
|
||||
"""Invokes a command in an isolated environment. The arguments are
|
||||
forwarded directly to the command line script; the `extra` keyword
|
||||
arguments are passed to the :meth:`~clickpkg.Command.main` function of
|
||||
the command.
|
||||
|
||||
This returns a :class:`Result` object.
|
||||
|
||||
:param cli: the command to invoke
|
||||
:param args: the arguments to invoke. It may be given as an iterable
|
||||
or a string. When given as string it will be interpreted
|
||||
as a Unix shell command. More details at
|
||||
:func:`shlex.split`.
|
||||
:param input: the input data for `sys.stdin`.
|
||||
:param env: the environment overrides.
|
||||
:param catch_exceptions: Whether to catch any other exceptions than
|
||||
``SystemExit``. If :data:`None`, the value
|
||||
from :class:`CliRunner` is used.
|
||||
:param extra: the keyword arguments to pass to :meth:`main`.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
The result object has the ``output_bytes`` attribute with
|
||||
the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would
|
||||
see it in their terminal.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
The result object always returns the ``stderr_bytes`` stream.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
The result object has the ``return_value`` attribute with
|
||||
the value returned from the invoked command.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the ``color`` parameter.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Added the ``catch_exceptions`` parameter.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The result object has the ``exc_info`` attribute with the
|
||||
traceback if available.
|
||||
"""
|
||||
exc_info = None
|
||||
if catch_exceptions is None:
|
||||
catch_exceptions = self.catch_exceptions
|
||||
|
||||
with self.isolation(input=input, env=env, color=color) as outstreams:
|
||||
return_value = None
|
||||
exception: BaseException | None = None
|
||||
exit_code = 0
|
||||
|
||||
if isinstance(args, str):
|
||||
args = shlex.split(args)
|
||||
|
||||
try:
|
||||
prog_name = extra.pop("prog_name")
|
||||
except KeyError:
|
||||
prog_name = self.get_default_prog_name(cli)
|
||||
|
||||
try:
|
||||
return_value = cli.main(args=args or (), prog_name=prog_name, **extra)
|
||||
except SystemExit as e:
|
||||
exc_info = sys.exc_info()
|
||||
e_code = t.cast("int | t.Any | None", e.code)
|
||||
|
||||
if e_code is None:
|
||||
e_code = 0
|
||||
|
||||
if e_code != 0:
|
||||
exception = e
|
||||
|
||||
if not isinstance(e_code, int):
|
||||
sys.stdout.write(str(e_code))
|
||||
sys.stdout.write("\n")
|
||||
e_code = 1
|
||||
|
||||
exit_code = e_code
|
||||
|
||||
except Exception as e:
|
||||
if not catch_exceptions:
|
||||
raise
|
||||
exception = e
|
||||
exit_code = 1
|
||||
exc_info = sys.exc_info()
|
||||
finally:
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
stdout = outstreams[0].getvalue()
|
||||
stderr = outstreams[1].getvalue()
|
||||
output = outstreams[2].getvalue()
|
||||
|
||||
return Result(
|
||||
runner=self,
|
||||
stdout_bytes=stdout,
|
||||
stderr_bytes=stderr,
|
||||
output_bytes=output,
|
||||
return_value=return_value,
|
||||
exit_code=exit_code,
|
||||
exception=exception,
|
||||
exc_info=exc_info, # type: ignore
|
||||
)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolated_filesystem(
|
||||
self, temp_dir: str | os.PathLike[str] | None = None
|
||||
) -> cabc.Iterator[str]:
|
||||
"""A context manager that creates a temporary directory and
|
||||
changes the current working directory to it. This isolates tests
|
||||
that affect the contents of the CWD to prevent them from
|
||||
interfering with each other.
|
||||
|
||||
:param temp_dir: Create the temporary directory under this
|
||||
directory. If given, the created directory is not removed
|
||||
when exiting.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added the ``temp_dir`` parameter.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
dt = tempfile.mkdtemp(dir=temp_dir)
|
||||
os.chdir(dt)
|
||||
|
||||
try:
|
||||
yield dt
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
if temp_dir is None:
|
||||
import shutil
|
||||
|
||||
try:
|
||||
shutil.rmtree(dt)
|
||||
except OSError:
|
||||
pass
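
# A short end-to-end sketch of the runner, with a hypothetical command defined
# inline; ``click`` is imported lazily here because this module is part of the
# package itself.
def _sketch_cli_runner_usage() -> None:
    import click

    @click.command()
    @click.option("--name", default="World")
    def hello(name: str) -> None:
        click.echo(f"Hello {name}!")

    runner = CliRunner()
    result = runner.invoke(hello, ["--name", "Ada"])
    assert result.exit_code == 0
    assert "Hello Ada!" in result.output

    # isolated_filesystem() gives the test a throwaway working directory.
    with runner.isolated_filesystem() as tmp_dir:
        with open("data.txt", "w") as f:  # created inside ``tmp_dir``
            f.write("hello")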
|
||||
1209
tapdown/lib/python3.11/site-packages/click/types.py
Normal file
File diff suppressed because it is too large
627
tapdown/lib/python3.11/site-packages/click/utils.py
Normal file
@@ -0,0 +1,627 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import typing as t
|
||||
from functools import update_wrapper
|
||||
from types import ModuleType
|
||||
from types import TracebackType
|
||||
|
||||
from ._compat import _default_text_stderr
|
||||
from ._compat import _default_text_stdout
|
||||
from ._compat import _find_binary_writer
|
||||
from ._compat import auto_wrap_for_ansi
|
||||
from ._compat import binary_streams
|
||||
from ._compat import open_stream
|
||||
from ._compat import should_strip_ansi
|
||||
from ._compat import strip_ansi
|
||||
from ._compat import text_streams
|
||||
from ._compat import WIN
|
||||
from .globals import resolve_color_default
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
|
||||
P = te.ParamSpec("P")
|
||||
|
||||
R = t.TypeVar("R")
|
||||
|
||||
|
||||
def _posixify(name: str) -> str:
|
||||
return "-".join(name.split()).lower()
|
||||
|
||||
|
||||
def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]:
|
||||
"""Wraps a function so that it swallows exceptions."""
|
||||
|
||||
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None:
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
|
||||
return update_wrapper(wrapper, func)
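
# A minimal sketch: the wrapped callable returns ``None`` instead of raising.
def _sketch_safecall_usage() -> None:
    safe_int = safecall(int)
    assert safe_int("42") == 42
    assert safe_int("not a number") is None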
|
||||
|
||||
|
||||
def make_str(value: t.Any) -> str:
|
||||
"""Converts a value into a valid string."""
|
||||
if isinstance(value, bytes):
|
||||
try:
|
||||
return value.decode(sys.getfilesystemencoding())
|
||||
except UnicodeError:
|
||||
return value.decode("utf-8", "replace")
|
||||
return str(value)
|
||||
|
||||
|
||||
def make_default_short_help(help: str, max_length: int = 45) -> str:
|
||||
"""Returns a condensed version of help string."""
|
||||
# Consider only the first paragraph.
|
||||
paragraph_end = help.find("\n\n")
|
||||
|
||||
if paragraph_end != -1:
|
||||
help = help[:paragraph_end]
|
||||
|
||||
# Collapse newlines, tabs, and spaces.
|
||||
words = help.split()
|
||||
|
||||
if not words:
|
||||
return ""
|
||||
|
||||
# The first paragraph started with a "no rewrap" marker, ignore it.
|
||||
if words[0] == "\b":
|
||||
words = words[1:]
|
||||
|
||||
total_length = 0
|
||||
last_index = len(words) - 1
|
||||
|
||||
for i, word in enumerate(words):
|
||||
total_length += len(word) + (i > 0)
|
||||
|
||||
if total_length > max_length: # too long, truncate
|
||||
break
|
||||
|
||||
if word[-1] == ".": # sentence end, truncate without "..."
|
||||
return " ".join(words[: i + 1])
|
||||
|
||||
if total_length == max_length and i != last_index:
|
||||
break # not at sentence end, truncate with "..."
|
||||
else:
|
||||
return " ".join(words) # no truncation needed
|
||||
|
||||
# Account for the length of the suffix.
|
||||
total_length += len("...")
|
||||
|
||||
# remove words until the length is short enough
|
||||
while i > 0:
|
||||
total_length -= len(words[i]) + (i > 0)
|
||||
|
||||
if total_length <= max_length:
|
||||
break
|
||||
|
||||
i -= 1
|
||||
|
||||
return " ".join(words[:i]) + "..."
|
||||
|
||||
|
||||
class LazyFile:
|
||||
"""A lazy file works like a regular file but it does not fully open
|
||||
the file but it does perform some basic checks early to see if the
|
||||
filename parameter does make sense. This is useful for safely opening
|
||||
files for writing.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
filename: str | os.PathLike[str],
|
||||
mode: str = "r",
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
atomic: bool = False,
|
||||
):
|
||||
self.name: str = os.fspath(filename)
|
||||
self.mode = mode
|
||||
self.encoding = encoding
|
||||
self.errors = errors
|
||||
self.atomic = atomic
|
||||
self._f: t.IO[t.Any] | None
|
||||
self.should_close: bool
|
||||
|
||||
if self.name == "-":
|
||||
self._f, self.should_close = open_stream(filename, mode, encoding, errors)
|
||||
else:
|
||||
if "r" in mode:
|
||||
# Open and close the file in case we're opening it for
|
||||
# reading so that we can catch at least some errors in
|
||||
# some cases early.
|
||||
open(filename, mode).close()
|
||||
self._f = None
|
||||
self.should_close = True
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self.open(), name)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
if self._f is not None:
|
||||
return repr(self._f)
|
||||
return f"<unopened file '{format_filename(self.name)}' {self.mode}>"
|
||||
|
||||
def open(self) -> t.IO[t.Any]:
|
||||
"""Opens the file if it's not yet open. This call might fail with
|
||||
a :exc:`FileError`. Not handling this error will produce an error
|
||||
that Click shows.
|
||||
"""
|
||||
if self._f is not None:
|
||||
return self._f
|
||||
try:
|
||||
rv, self.should_close = open_stream(
|
||||
self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
|
||||
)
|
||||
except OSError as e:
|
||||
from .exceptions import FileError
|
||||
|
||||
raise FileError(self.name, hint=e.strerror) from e
|
||||
self._f = rv
|
||||
return rv
|
||||
|
||||
def close(self) -> None:
|
||||
"""Closes the underlying file, no matter what."""
|
||||
if self._f is not None:
|
||||
self._f.close()
|
||||
|
||||
def close_intelligently(self) -> None:
|
||||
"""This function only closes the file if it was opened by the lazy
|
||||
file wrapper. For instance this will never close stdin.
|
||||
"""
|
||||
if self.should_close:
|
||||
self.close()
|
||||
|
||||
def __enter__(self) -> LazyFile:
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.close_intelligently()
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[t.AnyStr]:
|
||||
self.open()
|
||||
return iter(self._f) # type: ignore
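
# Illustrative sketch (not part of the original class; the file name below is
# invented): a LazyFile opens its target only on first attribute use, and
# atomic=True stages writes in a temp file that replaces the real file on close.
if __name__ == "__main__":
    lf = LazyFile("lazy-demo.txt", mode="w", atomic=True)
    with lf as f:  # nothing is opened until f.write() is first called
        f.write("written atomically\n")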
|
||||
|
||||
|
||||
class KeepOpenFile:
|
||||
def __init__(self, file: t.IO[t.Any]) -> None:
|
||||
self._file: t.IO[t.Any] = file
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._file, name)
|
||||
|
||||
def __enter__(self) -> KeepOpenFile:
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._file)
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[t.AnyStr]:
|
||||
return iter(self._file)
|
||||
|
||||
|
||||
def echo(
|
||||
message: t.Any | None = None,
|
||||
file: t.IO[t.Any] | None = None,
|
||||
nl: bool = True,
|
||||
err: bool = False,
|
||||
color: bool | None = None,
|
||||
) -> None:
|
||||
"""Print a message and newline to stdout or a file. This should be
|
||||
used instead of :func:`print` because it provides better support
|
||||
for different data, files, and environments.
|
||||
|
||||
Compared to :func:`print`, this does the following:
|
||||
|
||||
- Ensures that the output encoding is not misconfigured on Linux.
|
||||
- Supports Unicode in the Windows console.
|
||||
- Supports writing to binary outputs, and supports writing bytes
|
||||
to text outputs.
|
||||
- Supports colors and styles on Windows.
|
||||
- Removes ANSI color and style codes if the output does not look
|
||||
like an interactive terminal.
|
||||
- Always flushes the output.
|
||||
|
||||
:param message: The string or bytes to output. Other objects are
|
||||
converted to strings.
|
||||
:param file: The file to write to. Defaults to ``stdout``.
|
||||
:param err: Write to ``stderr`` instead of ``stdout``.
|
||||
:param nl: Print a newline after the message. Enabled by default.
|
||||
:param color: Force showing or hiding colors and other styles. By
|
||||
default Click will remove color if the output does not look like
|
||||
an interactive terminal.
|
||||
|
||||
.. versionchanged:: 6.0
|
||||
Support Unicode output on the Windows console. Click does not
|
||||
modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()``
|
||||
will still not support Unicode.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the ``color`` parameter.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
Added the ``err`` parameter.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Support colors on Windows if colorama is installed.
|
||||
"""
|
||||
if file is None:
|
||||
if err:
|
||||
file = _default_text_stderr()
|
||||
else:
|
||||
file = _default_text_stdout()
|
||||
|
||||
# There are no standard streams attached to write to. For example,
|
||||
# pythonw on Windows.
|
||||
if file is None:
|
||||
return
|
||||
|
||||
# Convert non bytes/text into the native string type.
|
||||
if message is not None and not isinstance(message, (str, bytes, bytearray)):
|
||||
out: str | bytes | bytearray | None = str(message)
|
||||
else:
|
||||
out = message
|
||||
|
||||
if nl:
|
||||
out = out or ""
|
||||
if isinstance(out, str):
|
||||
out += "\n"
|
||||
else:
|
||||
out += b"\n"
|
||||
|
||||
if not out:
|
||||
file.flush()
|
||||
return
|
||||
|
||||
# If there is a message and the value looks like bytes, we manually
|
||||
# need to find the binary stream and write the message in there.
|
||||
# This is done separately so that most stream types will work as you
|
||||
# would expect. Eg: you can write to StringIO for other cases.
|
||||
if isinstance(out, (bytes, bytearray)):
|
||||
binary_file = _find_binary_writer(file)
|
||||
|
||||
if binary_file is not None:
|
||||
file.flush()
|
||||
binary_file.write(out)
|
||||
binary_file.flush()
|
||||
return
|
||||
|
||||
# ANSI style code support. For no message or bytes, nothing happens.
|
||||
# When outputting to a file instead of a terminal, strip codes.
|
||||
else:
|
||||
color = resolve_color_default(color)
|
||||
|
||||
if should_strip_ansi(file, color):
|
||||
out = strip_ansi(out)
|
||||
elif WIN:
|
||||
if auto_wrap_for_ansi is not None:
|
||||
file = auto_wrap_for_ansi(file, color) # type: ignore
|
||||
elif not color:
|
||||
out = strip_ansi(out)
|
||||
|
||||
file.write(out) # type: ignore
|
||||
file.flush()
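
# Illustrative sketch (not part of the original module): echo() handles str,
# bytes, and stderr routing with one call. The messages are invented.
if __name__ == "__main__":
    echo("plain text")                       # str -> stdout, newline appended
    echo(b"raw bytes\n", nl=False)           # bytes -> the binary stdout stream
    echo("something went wrong", err=True)   # routed to stderr instead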
|
||||
|
||||
|
||||
def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO:
|
||||
"""Returns a system stream for byte processing.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
"""
|
||||
opener = binary_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError(f"Unknown standard stream '{name}'")
|
||||
return opener()
|
||||
|
||||
|
||||
def get_text_stream(
|
||||
name: t.Literal["stdin", "stdout", "stderr"],
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
) -> t.TextIO:
|
||||
"""Returns a system stream for text processing. This usually returns
|
||||
a wrapped stream around a binary stream returned from
|
||||
:func:`get_binary_stream` but it also can take shortcuts for already
|
||||
correctly configured streams.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
:param encoding: overrides the detected default encoding.
|
||||
:param errors: overrides the default error mode.
|
||||
"""
|
||||
opener = text_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError(f"Unknown standard stream '{name}'")
|
||||
return opener(encoding, errors)
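
# Illustrative sketch (not part of the original module): the helpers above
# fetch byte- or text-oriented views of the standard streams.
if __name__ == "__main__":
    get_text_stream("stderr").write("status: ok\n")
    get_binary_stream("stdout").write(b"raw bytes, no text encoding applied\n")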
|
||||
|
||||
|
||||
def open_file(
|
||||
filename: str | os.PathLike[str],
|
||||
mode: str = "r",
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
lazy: bool = False,
|
||||
atomic: bool = False,
|
||||
) -> t.IO[t.Any]:
|
||||
"""Open a file, with extra behavior to handle ``'-'`` to indicate
|
||||
a standard stream, lazy open on write, and atomic write. Similar to
|
||||
the behavior of the :class:`~click.File` param type.
|
||||
|
||||
If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is
|
||||
wrapped so that using it in a context manager will not close it.
|
||||
This makes it possible to use the function without accidentally
|
||||
closing a standard stream:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
with open_file(filename) as f:
|
||||
...
|
||||
|
||||
:param filename: The name or Path of the file to open, or ``'-'`` for
|
||||
``stdin``/``stdout``.
|
||||
:param mode: The mode in which to open the file.
|
||||
:param encoding: The encoding to decode or encode a file opened in
|
||||
text mode.
|
||||
:param errors: The error handling mode.
|
||||
:param lazy: Wait to open the file until it is accessed. For read
|
||||
mode, the file is temporarily opened to raise access errors
|
||||
early, then closed until it is read again.
|
||||
:param atomic: Write to a temporary file and replace the given file
|
||||
on close.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
if lazy:
|
||||
return t.cast(
|
||||
"t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic)
|
||||
)
|
||||
|
||||
f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
|
||||
|
||||
if not should_close:
|
||||
f = t.cast("t.IO[t.Any]", KeepOpenFile(f))
|
||||
|
||||
return f
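
# Usage sketch (illustrative; the regular file name is invented): '-' maps to
# stdout and stays open after the with-block, while lazy=True defers the real
# open until first access.
if __name__ == "__main__":
    with open_file("-", mode="w") as out:     # stdout, not closed on exit
        out.write("hello\n")
    with open_file("open-file-demo.txt", mode="w", lazy=True) as f:
        f.write("opened on first access\n")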
|
||||
|
||||
|
||||
def format_filename(
|
||||
filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
|
||||
shorten: bool = False,
|
||||
) -> str:
|
||||
"""Format a filename as a string for display. Ensures the filename can be
|
||||
displayed by replacing any invalid bytes or surrogate escapes in the name
|
||||
with the replacement character ``<60>``.
|
||||
|
||||
Invalid bytes or surrogate escapes will raise an error when written to a
|
||||
stream with ``errors="strict"``. This will typically happen with ``stdout``
|
||||
when the locale is something like ``en_GB.UTF-8``.
|
||||
|
||||
Many scenarios *are* safe to write surrogates though, due to PEP 538 and
|
||||
PEP 540, including:
|
||||
|
||||
- Writing to ``stderr``, which uses ``errors="backslashreplace"``.
|
||||
- The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens
|
||||
stdout and stderr with ``errors="surrogateescape"``.
|
||||
- None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``.
|
||||
- Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``.
|
||||
Python opens stdout and stderr with ``errors="surrogateescape"``.
|
||||
|
||||
:param filename: formats a filename for UI display. This will also convert
|
||||
the filename into unicode without failing.
|
||||
:param shorten: this optionally shortens the filename to strip of the
|
||||
path that leads up to it.
|
||||
"""
|
||||
if shorten:
|
||||
filename = os.path.basename(filename)
|
||||
else:
|
||||
filename = os.fspath(filename)
|
||||
|
||||
if isinstance(filename, bytes):
|
||||
filename = filename.decode(sys.getfilesystemencoding(), "replace")
|
||||
else:
|
||||
filename = filename.encode("utf-8", "surrogateescape").decode(
|
||||
"utf-8", "replace"
|
||||
)
|
||||
|
||||
return filename
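
# Illustrative sketch (not part of the original module; paths are invented):
# format_filename() yields something printable even for undecodable byte names.
if __name__ == "__main__":
    print(format_filename(b"report-\xff.txt"))  # invalid byte is replaced
    print(format_filename("/tmp/some/long/path/report.txt", shorten=True))  # "report.txt"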
|
||||
|
||||
|
||||
def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
|
||||
r"""Returns the config folder for the application. The default behavior
|
||||
is to return whatever is most appropriate for the operating system.
|
||||
|
||||
To give you an idea, for an app called ``"Foo Bar"``, something like
|
||||
the following folders could be returned:
|
||||
|
||||
Mac OS X:
|
||||
``~/Library/Application Support/Foo Bar``
|
||||
Mac OS X (POSIX):
|
||||
``~/.foo-bar``
|
||||
Unix:
|
||||
``~/.config/foo-bar``
|
||||
Unix (POSIX):
|
||||
``~/.foo-bar``
|
||||
Windows (roaming):
|
||||
``C:\Users\<user>\AppData\Roaming\Foo Bar``
|
||||
Windows (not roaming):
|
||||
``C:\Users\<user>\AppData\Local\Foo Bar``
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param app_name: the application name. This should be properly capitalized
|
||||
and can contain whitespace.
|
||||
:param roaming: controls if the folder should be roaming or not on Windows.
|
||||
Has no effect otherwise.
|
||||
:param force_posix: if this is set to `True` then on any POSIX system the
|
||||
folder will be stored in the home folder with a leading
|
||||
dot instead of the XDG config home or darwin's
|
||||
application support folder.
|
||||
"""
|
||||
if WIN:
|
||||
key = "APPDATA" if roaming else "LOCALAPPDATA"
|
||||
folder = os.environ.get(key)
|
||||
if folder is None:
|
||||
folder = os.path.expanduser("~")
|
||||
return os.path.join(folder, app_name)
|
||||
if force_posix:
|
||||
return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
|
||||
if sys.platform == "darwin":
|
||||
return os.path.join(
|
||||
os.path.expanduser("~/Library/Application Support"), app_name
|
||||
)
|
||||
return os.path.join(
|
||||
os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
|
||||
_posixify(app_name),
|
||||
)
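
# Usage sketch (the application name is invented): a per-user config folder,
# e.g. ~/.config/brookhaven-tools on Linux or an AppData folder on Windows.
if __name__ == "__main__":
    print(get_app_dir("BrookHaven Tools"))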
|
||||
|
||||
|
||||
class PacifyFlushWrapper:
|
||||
"""This wrapper is used to catch and suppress BrokenPipeErrors resulting
|
||||
from ``.flush()`` being called on broken pipe during the shutdown/final-GC
|
||||
of the Python interpreter. Notably ``.flush()`` is always called on
|
||||
``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
|
||||
other cleanup code, and the case where the underlying file is not a broken
|
||||
pipe, all calls and attributes are proxied.
|
||||
"""
|
||||
|
||||
def __init__(self, wrapped: t.IO[t.Any]) -> None:
|
||||
self.wrapped = wrapped
|
||||
|
||||
def flush(self) -> None:
|
||||
try:
|
||||
self.wrapped.flush()
|
||||
except OSError as e:
|
||||
import errno
|
||||
|
||||
if e.errno != errno.EPIPE:
|
||||
raise
|
||||
|
||||
def __getattr__(self, attr: str) -> t.Any:
|
||||
return getattr(self.wrapped, attr)
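
# Illustrative sketch (not from the original module): wrapping sys.stdout so a
# broken pipe during the interpreter's final flush (e.g. `prog | head`) is
# swallowed instead of raising.
if __name__ == "__main__":
    import sys
    sys.stdout = PacifyFlushWrapper(sys.stdout)  # type: ignore[assignment]
    print("output that may be cut off by a closed pipe")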
|
||||
|
||||
|
||||
def _detect_program_name(
|
||||
path: str | None = None, _main: ModuleType | None = None
|
||||
) -> str:
|
||||
"""Determine the command used to run the program, for use in help
|
||||
text. If a file or entry point was executed, the file name is
|
||||
returned. If ``python -m`` was used to execute a module or package,
|
||||
``python -m name`` is returned.
|
||||
|
||||
This doesn't try to be too precise, the goal is to give a concise
|
||||
name for help text. Files are only shown as their name without the
|
||||
path. ``python`` is only shown for modules, and the full path to
|
||||
``sys.executable`` is not shown.
|
||||
|
||||
:param path: The Python file being executed. Python puts this in
|
||||
``sys.argv[0]``, which is used by default.
|
||||
:param _main: The ``__main__`` module. This should only be passed
|
||||
during internal testing.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
Based on command args detection in the Werkzeug reloader.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
if _main is None:
|
||||
_main = sys.modules["__main__"]
|
||||
|
||||
if not path:
|
||||
path = sys.argv[0]
|
||||
|
||||
# The value of __package__ indicates how Python was called. It may
|
||||
# not exist if a setuptools script is installed as an egg. It may be
|
||||
# set incorrectly for entry points created with pip on Windows.
|
||||
# It is set to "" inside a Shiv or PEX zipapp.
|
||||
if getattr(_main, "__package__", None) in {None, ""} or (
|
||||
os.name == "nt"
|
||||
and _main.__package__ == ""
|
||||
and not os.path.exists(path)
|
||||
and os.path.exists(f"{path}.exe")
|
||||
):
|
||||
# Executed a file, like "python app.py".
|
||||
return os.path.basename(path)
|
||||
|
||||
# Executed a module, like "python -m example".
|
||||
# Rewritten by Python from "-m script" to "/path/to/script.py".
|
||||
# Need to look at main module to determine how it was executed.
|
||||
py_module = t.cast(str, _main.__package__)
|
||||
name = os.path.splitext(os.path.basename(path))[0]
|
||||
|
||||
# A submodule like "example.cli".
|
||||
if name != "__main__":
|
||||
py_module = f"{py_module}.{name}"
|
||||
|
||||
return f"python -m {py_module.lstrip('.')}"
|
||||
|
||||
|
||||
def _expand_args(
|
||||
args: cabc.Iterable[str],
|
||||
*,
|
||||
user: bool = True,
|
||||
env: bool = True,
|
||||
glob_recursive: bool = True,
|
||||
) -> list[str]:
|
||||
"""Simulate Unix shell expansion with Python functions.
|
||||
|
||||
See :func:`glob.glob`, :func:`os.path.expanduser`, and
|
||||
:func:`os.path.expandvars`.
|
||||
|
||||
This is intended for use on Windows, where the shell does not do any
|
||||
expansion. It may not exactly match what a Unix shell would do.
|
||||
|
||||
:param args: List of command line arguments to expand.
|
||||
:param user: Expand user home directory.
|
||||
:param env: Expand environment variables.
|
||||
:param glob_recursive: ``**`` matches directories recursively.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
Invalid glob patterns are treated as empty expansions rather
|
||||
than raising an error.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
from glob import glob
|
||||
|
||||
out = []
|
||||
|
||||
for arg in args:
|
||||
if user:
|
||||
arg = os.path.expanduser(arg)
|
||||
|
||||
if env:
|
||||
arg = os.path.expandvars(arg)
|
||||
|
||||
try:
|
||||
matches = glob(arg, recursive=glob_recursive)
|
||||
except re.error:
|
||||
matches = []
|
||||
|
||||
if not matches:
|
||||
out.append(arg)
|
||||
else:
|
||||
out.extend(matches)
|
||||
|
||||
return out
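
# Illustrative sketch (patterns invented; results depend on the local files):
# _expand_args() performs home/env/glob expansion for shells that do not.
if __name__ == "__main__":
    print(_expand_args(["~/*.txt", "$HOME/notes/**/*.md", "no-match-left-alone"]))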
|
||||
24
tapdown/lib/python3.11/site-packages/dateutil/__init__.py
Normal file
@@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
import sys

try:
    from ._version import version as __version__
except ImportError:
    __version__ = 'unknown'

__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
           'utils', 'zoneinfo']


def __getattr__(name):
    import importlib

    if name in __all__:
        return importlib.import_module("." + name, __name__)
    raise AttributeError(
        "module {!r} has no attribute {!r}".format(__name__, name)
    )


def __dir__():
    # __dir__ should include all the lazy-importable modules as well.
    return [x for x in globals() if x not in sys.modules] + __all__
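
# Illustrative sketch (not part of the upstream file): the module-level
# __getattr__ above imports submodules lazily on first attribute access.
if __name__ == "__main__":
    import dateutil
    print(dateutil.tz)           # first access triggers importlib.import_module
    print(dateutil.__version__)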
43
tapdown/lib/python3.11/site-packages/dateutil/_common.py
Normal file
@@ -0,0 +1,43 @@
"""
Common code used in multiple modules.
"""


class weekday(object):
    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        if n == self.n:
            return self
        else:
            return self.__class__(self.weekday, n)

    def __eq__(self, other):
        try:
            if self.weekday != other.weekday or self.n != other.n:
                return False
        except AttributeError:
            return False
        return True

    def __hash__(self):
        return hash((
            self.weekday,
            self.n,
        ))

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
        if not self.n:
            return s
        else:
            return "%s(%+d)" % (s, self.n)

# vim:ts=4:sw=4:et
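
# Usage sketch (not part of the upstream file): weekday instances are callable
# and return a copy carrying n, e.g. "second Tuesday".
if __name__ == "__main__":
    TU = weekday(1)               # 0 = MO, 1 = TU, ...
    print(TU, TU(2), TU(-1))      # -> TU TU(+2) TU(-1)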
@@ -0,0 +1,4 @@
# file generated by setuptools_scm
# don't change, don't track in version control
__version__ = version = '2.9.0.post0'
__version_tuple__ = version_tuple = (2, 9, 0)
89
tapdown/lib/python3.11/site-packages/dateutil/easter.py
Normal file
@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
"""
This module offers a generic Easter computing method for any given year, using
Western, Orthodox or Julian algorithms.
"""

import datetime

__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]

EASTER_JULIAN = 1
EASTER_ORTHODOX = 2
EASTER_WESTERN = 3


def easter(year, method=EASTER_WESTERN):
    """
    This method was ported from the work done by GM Arts,
    on top of the algorithm by Claus Tondering, which was
    based in part on the algorithm of Ouding (1940), as
    quoted in "Explanatory Supplement to the Astronomical
    Almanac", P. Kenneth Seidelmann, editor.

    This algorithm implements three different Easter
    calculation methods:

    1. Original calculation in Julian calendar, valid in
       dates after 326 AD
    2. Original method, with date converted to Gregorian
       calendar, valid in years 1583 to 4099
    3. Revised method, in Gregorian calendar, valid in
       years 1583 to 4099 as well

    These methods are represented by the constants:

    * ``EASTER_JULIAN = 1``
    * ``EASTER_ORTHODOX = 2``
    * ``EASTER_WESTERN = 3``

    The default method is method 3.

    More about the algorithm may be found at:

    `GM Arts: Easter Algorithms <http://www.gmarts.org/index.php?go=415>`_

    and

    `The Calendar FAQ: Easter <https://www.tondering.dk/claus/cal/easter.php>`_

    """

    if not (1 <= method <= 3):
        raise ValueError("invalid method")

    # g - Golden year - 1
    # c - Century
    # h - (23 - Epact) mod 30
    # i - Number of days from March 21 to Paschal Full Moon
    # j - Weekday for PFM (0=Sunday, etc)
    # p - Number of days from March 21 to Sunday on or before PFM
    #     (-6 to 28 methods 1 & 3, to 56 for method 2)
    # e - Extra days to add for method 2 (converting Julian
    #     date to Gregorian date)

    y = year
    g = y % 19
    e = 0
    if method < 3:
        # Old method
        i = (19*g + 15) % 30
        j = (y + y//4 + i) % 7
        if method == 2:
            # Extra dates to convert Julian to Gregorian date
            e = 10
            if y > 1600:
                e = e + y//100 - 16 - (y//100 - 16)//4
    else:
        # New method
        c = y//100
        h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30
        i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11))
        j = (y + y//4 + i + 2 - c + c//4) % 7

    # p can be from -6 to 56 corresponding to dates 22 March to 23 May
    # (later dates apply to method 2, although 23 May never actually occurs)
    p = i - j + e
    d = 1 + (p + 27 + (p + 6)//40) % 31
    m = 3 + (p + 26)//30
    return datetime.date(int(y), int(m), int(d))
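
# Usage sketch (not part of the upstream file): Western vs. Orthodox Easter.
if __name__ == "__main__":
    print(easter(2024))                          # 2024-03-31, Western (default)
    print(easter(2024, method=EASTER_ORTHODOX))  # 2024-05-05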
@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
from ._parser import parse, parser, parserinfo, ParserError
from ._parser import DEFAULTPARSER, DEFAULTTZPARSER
from ._parser import UnknownTimezoneWarning

from ._parser import __doc__

from .isoparser import isoparser, isoparse

__all__ = ['parse', 'parser', 'parserinfo',
           'isoparse', 'isoparser',
           'ParserError',
           'UnknownTimezoneWarning']


###
# Deprecate portions of the private interface so that downstream code that
# is improperly relying on it is given *some* notice.


def __deprecated_private_func(f):
    from functools import wraps
    import warnings

    msg = ('{name} is a private function and may break without warning, '
           'it will be moved and or renamed in future versions.')
    msg = msg.format(name=f.__name__)

    @wraps(f)
    def deprecated_func(*args, **kwargs):
        warnings.warn(msg, DeprecationWarning)
        return f(*args, **kwargs)

    return deprecated_func


def __deprecate_private_class(c):
    import warnings

    msg = ('{name} is a private class and may break without warning, '
           'it will be moved and or renamed in future versions.')
    msg = msg.format(name=c.__name__)

    class private_class(c):
        __doc__ = c.__doc__

        def __init__(self, *args, **kwargs):
            warnings.warn(msg, DeprecationWarning)
            super(private_class, self).__init__(*args, **kwargs)

    private_class.__name__ = c.__name__

    return private_class


from ._parser import _timelex, _resultbase
from ._parser import _tzparser, _parsetz

_timelex = __deprecate_private_class(_timelex)
_tzparser = __deprecate_private_class(_tzparser)
_resultbase = __deprecate_private_class(_resultbase)
_parsetz = __deprecated_private_func(_parsetz)
1613
tapdown/lib/python3.11/site-packages/dateutil/parser/_parser.py
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,416 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module offers a parser for ISO-8601 strings
|
||||
|
||||
It is intended to support all valid date, time and datetime formats per the
|
||||
ISO-8601 specification.
|
||||
|
||||
..versionadded:: 2.7.0
|
||||
"""
|
||||
from datetime import datetime, timedelta, time, date
|
||||
import calendar
|
||||
from dateutil import tz
|
||||
|
||||
from functools import wraps
|
||||
|
||||
import re
|
||||
import six
|
||||
|
||||
__all__ = ["isoparse", "isoparser"]
|
||||
|
||||
|
||||
def _takes_ascii(f):
|
||||
@wraps(f)
|
||||
def func(self, str_in, *args, **kwargs):
|
||||
# If it's a stream, read the whole thing
|
||||
str_in = getattr(str_in, 'read', lambda: str_in)()
|
||||
|
||||
# If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII
|
||||
if isinstance(str_in, six.text_type):
|
||||
# ASCII is the same in UTF-8
|
||||
try:
|
||||
str_in = str_in.encode('ascii')
|
||||
except UnicodeEncodeError as e:
|
||||
msg = 'ISO-8601 strings should contain only ASCII characters'
|
||||
six.raise_from(ValueError(msg), e)
|
||||
|
||||
return f(self, str_in, *args, **kwargs)
|
||||
|
||||
return func
|
||||
|
||||
|
||||
class isoparser(object):
|
||||
def __init__(self, sep=None):
|
||||
"""
|
||||
:param sep:
|
||||
A single character that separates date and time portions. If
|
||||
``None``, the parser will accept any single character.
|
||||
For strict ISO-8601 adherence, pass ``'T'``.
|
||||
"""
|
||||
if sep is not None:
|
||||
if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'):
|
||||
raise ValueError('Separator must be a single, non-numeric ' +
|
||||
'ASCII character')
|
||||
|
||||
sep = sep.encode('ascii')
|
||||
|
||||
self._sep = sep
|
||||
|
||||
@_takes_ascii
|
||||
def isoparse(self, dt_str):
|
||||
"""
|
||||
Parse an ISO-8601 datetime string into a :class:`datetime.datetime`.
|
||||
|
||||
An ISO-8601 datetime string consists of a date portion, followed
|
||||
optionally by a time portion - the date and time portions are separated
|
||||
by a single character separator, which is ``T`` in the official
|
||||
standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be
|
||||
combined with a time portion.
|
||||
|
||||
Supported date formats are:
|
||||
|
||||
Common:
|
||||
|
||||
- ``YYYY``
|
||||
- ``YYYY-MM``
|
||||
- ``YYYY-MM-DD`` or ``YYYYMMDD``
|
||||
|
||||
Uncommon:
|
||||
|
||||
- ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0)
|
||||
- ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day
|
||||
|
||||
The ISO week and day numbering follows the same logic as
|
||||
:func:`datetime.date.isocalendar`.
|
||||
|
||||
Supported time formats are:
|
||||
|
||||
- ``hh``
|
||||
- ``hh:mm`` or ``hhmm``
|
||||
- ``hh:mm:ss`` or ``hhmmss``
|
||||
- ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits)
|
||||
|
||||
Midnight is a special case for `hh`, as the standard supports both
|
||||
00:00 and 24:00 as a representation. The decimal separator can be
|
||||
either a dot or a comma.
|
||||
|
||||
|
||||
.. caution::
|
||||
|
||||
Support for fractional components other than seconds is part of the
|
||||
ISO-8601 standard, but is not currently implemented in this parser.
|
||||
|
||||
Supported time zone offset formats are:
|
||||
|
||||
- `Z` (UTC)
|
||||
- `±HH:MM`
|
||||
- `±HHMM`
|
||||
- `±HH`
|
||||
|
||||
Offsets will be represented as :class:`dateutil.tz.tzoffset` objects,
|
||||
with the exception of UTC, which will be represented as
|
||||
:class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such
|
||||
as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`.
|
||||
|
||||
:param dt_str:
|
||||
A string or stream containing only an ISO-8601 datetime string
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.datetime` representing the string.
|
||||
Unspecified components default to their lowest value.
|
||||
|
||||
.. warning::
|
||||
|
||||
As of version 2.7.0, the strictness of the parser should not be
|
||||
considered a stable part of the contract. Any valid ISO-8601 string
|
||||
that parses correctly with the default settings will continue to
|
||||
parse correctly in future versions, but invalid strings that
|
||||
currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not
|
||||
guaranteed to continue failing in future versions if they encode
|
||||
a valid date.
|
||||
|
||||
.. versionadded:: 2.7.0
|
||||
"""
|
||||
components, pos = self._parse_isodate(dt_str)
|
||||
|
||||
if len(dt_str) > pos:
|
||||
if self._sep is None or dt_str[pos:pos + 1] == self._sep:
|
||||
components += self._parse_isotime(dt_str[pos + 1:])
|
||||
else:
|
||||
raise ValueError('String contains unknown ISO components')
|
||||
|
||||
if len(components) > 3 and components[3] == 24:
|
||||
components[3] = 0
|
||||
return datetime(*components) + timedelta(days=1)
|
||||
|
||||
return datetime(*components)
|
||||
|
||||
@_takes_ascii
|
||||
def parse_isodate(self, datestr):
|
||||
"""
|
||||
Parse the date portion of an ISO string.
|
||||
|
||||
:param datestr:
|
||||
The string portion of an ISO string, without a separator
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.date` object
|
||||
"""
|
||||
components, pos = self._parse_isodate(datestr)
|
||||
if pos < len(datestr):
|
||||
raise ValueError('String contains unknown ISO ' +
|
||||
'components: {!r}'.format(datestr.decode('ascii')))
|
||||
return date(*components)
|
||||
|
||||
@_takes_ascii
|
||||
def parse_isotime(self, timestr):
|
||||
"""
|
||||
Parse the time portion of an ISO string.
|
||||
|
||||
:param timestr:
|
||||
The time portion of an ISO string, without a separator
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.time` object
|
||||
"""
|
||||
components = self._parse_isotime(timestr)
|
||||
if components[0] == 24:
|
||||
components[0] = 0
|
||||
return time(*components)
|
||||
|
||||
@_takes_ascii
|
||||
def parse_tzstr(self, tzstr, zero_as_utc=True):
|
||||
"""
|
||||
Parse a valid ISO time zone string.
|
||||
|
||||
See :func:`isoparser.isoparse` for details on supported formats.
|
||||
|
||||
:param tzstr:
|
||||
A string representing an ISO time zone offset
|
||||
|
||||
:param zero_as_utc:
|
||||
Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones
|
||||
|
||||
:return:
|
||||
Returns :class:`dateutil.tz.tzoffset` for offsets and
|
||||
:class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is
|
||||
specified) offsets equivalent to UTC.
|
||||
"""
|
||||
return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc)
|
||||
|
||||
# Constants
|
||||
_DATE_SEP = b'-'
|
||||
_TIME_SEP = b':'
|
||||
_FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)')
|
||||
|
||||
def _parse_isodate(self, dt_str):
|
||||
try:
|
||||
return self._parse_isodate_common(dt_str)
|
||||
except ValueError:
|
||||
return self._parse_isodate_uncommon(dt_str)
|
||||
|
||||
def _parse_isodate_common(self, dt_str):
|
||||
len_str = len(dt_str)
|
||||
components = [1, 1, 1]
|
||||
|
||||
if len_str < 4:
|
||||
raise ValueError('ISO string too short')
|
||||
|
||||
# Year
|
||||
components[0] = int(dt_str[0:4])
|
||||
pos = 4
|
||||
if pos >= len_str:
|
||||
return components, pos
|
||||
|
||||
has_sep = dt_str[pos:pos + 1] == self._DATE_SEP
|
||||
if has_sep:
|
||||
pos += 1
|
||||
|
||||
# Month
|
||||
if len_str - pos < 2:
|
||||
raise ValueError('Invalid common month')
|
||||
|
||||
components[1] = int(dt_str[pos:pos + 2])
|
||||
pos += 2
|
||||
|
||||
if pos >= len_str:
|
||||
if has_sep:
|
||||
return components, pos
|
||||
else:
|
||||
raise ValueError('Invalid ISO format')
|
||||
|
||||
if has_sep:
|
||||
if dt_str[pos:pos + 1] != self._DATE_SEP:
|
||||
raise ValueError('Invalid separator in ISO string')
|
||||
pos += 1
|
||||
|
||||
# Day
|
||||
if len_str - pos < 2:
|
||||
raise ValueError('Invalid common day')
|
||||
components[2] = int(dt_str[pos:pos + 2])
|
||||
return components, pos + 2
|
||||
|
||||
def _parse_isodate_uncommon(self, dt_str):
|
||||
if len(dt_str) < 4:
|
||||
raise ValueError('ISO string too short')
|
||||
|
||||
# All ISO formats start with the year
|
||||
year = int(dt_str[0:4])
|
||||
|
||||
has_sep = dt_str[4:5] == self._DATE_SEP
|
||||
|
||||
pos = 4 + has_sep # Skip '-' if it's there
|
||||
if dt_str[pos:pos + 1] == b'W':
|
||||
# YYYY-?Www-?D?
|
||||
pos += 1
|
||||
weekno = int(dt_str[pos:pos + 2])
|
||||
pos += 2
|
||||
|
||||
dayno = 1
|
||||
if len(dt_str) > pos:
|
||||
if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep:
|
||||
raise ValueError('Inconsistent use of dash separator')
|
||||
|
||||
pos += has_sep
|
||||
|
||||
dayno = int(dt_str[pos:pos + 1])
|
||||
pos += 1
|
||||
|
||||
base_date = self._calculate_weekdate(year, weekno, dayno)
|
||||
else:
|
||||
# YYYYDDD or YYYY-DDD
|
||||
if len(dt_str) - pos < 3:
|
||||
raise ValueError('Invalid ordinal day')
|
||||
|
||||
ordinal_day = int(dt_str[pos:pos + 3])
|
||||
pos += 3
|
||||
|
||||
if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)):
|
||||
raise ValueError('Invalid ordinal day' +
|
||||
' {} for year {}'.format(ordinal_day, year))
|
||||
|
||||
base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1)
|
||||
|
||||
components = [base_date.year, base_date.month, base_date.day]
|
||||
return components, pos
|
||||
|
||||
def _calculate_weekdate(self, year, week, day):
|
||||
"""
|
||||
Calculate the day of corresponding to the ISO year-week-day calendar.
|
||||
|
||||
This function is effectively the inverse of
|
||||
:func:`datetime.date.isocalendar`.
|
||||
|
||||
:param year:
|
||||
The year in the ISO calendar
|
||||
|
||||
:param week:
|
||||
The week in the ISO calendar - range is [1, 53]
|
||||
|
||||
:param day:
|
||||
The day in the ISO calendar - range is [1 (MON), 7 (SUN)]
|
||||
|
||||
:return:
|
||||
Returns a :class:`datetime.date`
|
||||
"""
|
||||
if not 0 < week < 54:
|
||||
raise ValueError('Invalid week: {}'.format(week))
|
||||
|
||||
if not 0 < day < 8: # Range is 1-7
|
||||
raise ValueError('Invalid weekday: {}'.format(day))
|
||||
|
||||
# Get week 1 for the specific year:
|
||||
jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it
|
||||
week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1)
|
||||
|
||||
# Now add the specific number of weeks and days to get what we want
|
||||
week_offset = (week - 1) * 7 + (day - 1)
|
||||
return week_1 + timedelta(days=week_offset)
|
||||
|
||||
def _parse_isotime(self, timestr):
|
||||
len_str = len(timestr)
|
||||
components = [0, 0, 0, 0, None]
|
||||
pos = 0
|
||||
comp = -1
|
||||
|
||||
if len_str < 2:
|
||||
raise ValueError('ISO time too short')
|
||||
|
||||
has_sep = False
|
||||
|
||||
while pos < len_str and comp < 5:
|
||||
comp += 1
|
||||
|
||||
if timestr[pos:pos + 1] in b'-+Zz':
|
||||
# Detect time zone boundary
|
||||
components[-1] = self._parse_tzstr(timestr[pos:])
|
||||
pos = len_str
|
||||
break
|
||||
|
||||
if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP:
|
||||
has_sep = True
|
||||
pos += 1
|
||||
elif comp == 2 and has_sep:
|
||||
if timestr[pos:pos+1] != self._TIME_SEP:
|
||||
raise ValueError('Inconsistent use of colon separator')
|
||||
pos += 1
|
||||
|
||||
if comp < 3:
|
||||
# Hour, minute, second
|
||||
components[comp] = int(timestr[pos:pos + 2])
|
||||
pos += 2
|
||||
|
||||
if comp == 3:
|
||||
# Fraction of a second
|
||||
frac = self._FRACTION_REGEX.match(timestr[pos:])
|
||||
if not frac:
|
||||
continue
|
||||
|
||||
us_str = frac.group(1)[:6] # Truncate to microseconds
|
||||
components[comp] = int(us_str) * 10**(6 - len(us_str))
|
||||
pos += len(frac.group())
|
||||
|
||||
if pos < len_str:
|
||||
raise ValueError('Unused components in ISO string')
|
||||
|
||||
if components[0] == 24:
|
||||
# Standard supports 00:00 and 24:00 as representations of midnight
|
||||
if any(component != 0 for component in components[1:4]):
|
||||
raise ValueError('Hour may only be 24 at 24:00:00.000')
|
||||
|
||||
return components
|
||||
|
||||
def _parse_tzstr(self, tzstr, zero_as_utc=True):
|
||||
if tzstr == b'Z' or tzstr == b'z':
|
||||
return tz.UTC
|
||||
|
||||
if len(tzstr) not in {3, 5, 6}:
|
||||
raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters')
|
||||
|
||||
if tzstr[0:1] == b'-':
|
||||
mult = -1
|
||||
elif tzstr[0:1] == b'+':
|
||||
mult = 1
|
||||
else:
|
||||
raise ValueError('Time zone offset requires sign')
|
||||
|
||||
hours = int(tzstr[1:3])
|
||||
if len(tzstr) == 3:
|
||||
minutes = 0
|
||||
else:
|
||||
minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):])
|
||||
|
||||
if zero_as_utc and hours == 0 and minutes == 0:
|
||||
return tz.UTC
|
||||
else:
|
||||
if minutes > 59:
|
||||
raise ValueError('Invalid minutes in time zone offset')
|
||||
|
||||
if hours > 23:
|
||||
raise ValueError('Invalid hours in time zone offset')
|
||||
|
||||
return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60)
|
||||
|
||||
|
||||
DEFAULT_ISOPARSER = isoparser()
|
||||
isoparse = DEFAULT_ISOPARSER.isoparse
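
# Usage sketch (not part of the upstream file): the module-level isoparse()
# and a dedicated isoparser instance.
if __name__ == "__main__":
    print(isoparse("2024-03-31T12:30:00+00:00"))    # tz-aware datetime (UTC)
    print(isoparse("2024-W14-1"))                   # ISO week date -> 2024-04-01
    print(isoparser().parse_isodate("2024-03-31"))  # date-only parsing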
|
||||
599
tapdown/lib/python3.11/site-packages/dateutil/relativedelta.py
Normal file
@@ -0,0 +1,599 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import calendar
|
||||
|
||||
import operator
|
||||
from math import copysign
|
||||
|
||||
from six import integer_types
|
||||
from warnings import warn
|
||||
|
||||
from ._common import weekday
|
||||
|
||||
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))
|
||||
|
||||
__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
|
||||
|
||||
|
||||
class relativedelta(object):
|
||||
"""
|
||||
The relativedelta type is designed to be applied to an existing datetime and
|
||||
can replace specific components of that datetime, or represents an interval
|
||||
of time.
|
||||
|
||||
It is based on the specification of the excellent work done by M.-A. Lemburg
|
||||
in his
|
||||
`mx.DateTime <https://www.egenix.com/products/python/mxBase/mxDateTime/>`_ extension.
|
||||
However, notice that this type does *NOT* implement the same algorithm as
|
||||
his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
|
||||
|
||||
There are two different ways to build a relativedelta instance. The
|
||||
first one is passing it two date/datetime classes::
|
||||
|
||||
relativedelta(datetime1, datetime2)
|
||||
|
||||
The second one is passing it any number of the following keyword arguments::
|
||||
|
||||
relativedelta(arg1=x,arg2=y,arg3=z...)
|
||||
|
||||
year, month, day, hour, minute, second, microsecond:
|
||||
Absolute information (argument is singular); adding or subtracting a
|
||||
relativedelta with absolute information does not perform an arithmetic
|
||||
operation, but rather REPLACES the corresponding value in the
|
||||
original datetime with the value(s) in relativedelta.
|
||||
|
||||
years, months, weeks, days, hours, minutes, seconds, microseconds:
|
||||
Relative information, may be negative (argument is plural); adding
|
||||
or subtracting a relativedelta with relative information performs
|
||||
the corresponding arithmetic operation on the original datetime value
|
||||
with the information in the relativedelta.
|
||||
|
||||
weekday:
|
||||
One of the weekday instances (MO, TU, etc) available in the
|
||||
relativedelta module. These instances may receive a parameter N,
|
||||
specifying the Nth weekday, which could be positive or negative
|
||||
(like MO(+1) or MO(-2)). Not specifying it is the same as specifying
|
||||
+1. You can also use an integer, where 0=MO. This argument is always
|
||||
relative e.g. if the calculated date is already Monday, using MO(1)
|
||||
or MO(-1) won't change the day. To effectively make it absolute, use
|
||||
it in combination with the day argument (e.g. day=1, MO(1) for first
|
||||
Monday of the month).
|
||||
|
||||
leapdays:
|
||||
Will add given days to the date found, if year is a leap
|
||||
year, and the date found is post 28 of february.
|
||||
|
||||
yearday, nlyearday:
|
||||
Set the yearday or the non-leap year day (jump leap days).
|
||||
These are converted to day/month/leapdays information.
|
||||
|
||||
There are relative and absolute forms of the keyword
|
||||
arguments. The plural is relative, and the singular is
|
||||
absolute. For each argument in the order below, the absolute form
|
||||
is applied first (by setting each attribute to that value) and
|
||||
then the relative form (by adding the value to the attribute).
|
||||
|
||||
The order of attributes considered when this relativedelta is
|
||||
added to a datetime is:
|
||||
|
||||
1. Year
|
||||
2. Month
|
||||
3. Day
|
||||
4. Hours
|
||||
5. Minutes
|
||||
6. Seconds
|
||||
7. Microseconds
|
||||
|
||||
Finally, weekday is applied, using the rule described above.
|
||||
|
||||
For example
|
||||
|
||||
>>> from datetime import datetime
|
||||
>>> from dateutil.relativedelta import relativedelta, MO
|
||||
>>> dt = datetime(2018, 4, 9, 13, 37, 0)
|
||||
>>> delta = relativedelta(hours=25, day=1, weekday=MO(1))
|
||||
>>> dt + delta
|
||||
datetime.datetime(2018, 4, 2, 14, 37)
|
||||
|
||||
First, the day is set to 1 (the first of the month), then 25 hours
|
||||
are added, to get to the 2nd day and 14th hour, finally the
|
||||
weekday is applied, but since the 2nd is already a Monday there is
|
||||
no effect.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, dt1=None, dt2=None,
|
||||
years=0, months=0, days=0, leapdays=0, weeks=0,
|
||||
hours=0, minutes=0, seconds=0, microseconds=0,
|
||||
year=None, month=None, day=None, weekday=None,
|
||||
yearday=None, nlyearday=None,
|
||||
hour=None, minute=None, second=None, microsecond=None):
|
||||
|
||||
if dt1 and dt2:
|
||||
# datetime is a subclass of date. So both must be date
|
||||
if not (isinstance(dt1, datetime.date) and
|
||||
isinstance(dt2, datetime.date)):
|
||||
raise TypeError("relativedelta only diffs datetime/date")
|
||||
|
||||
# We allow two dates, or two datetimes, so we coerce them to be
|
||||
# of the same type
|
||||
if (isinstance(dt1, datetime.datetime) !=
|
||||
isinstance(dt2, datetime.datetime)):
|
||||
if not isinstance(dt1, datetime.datetime):
|
||||
dt1 = datetime.datetime.fromordinal(dt1.toordinal())
|
||||
elif not isinstance(dt2, datetime.datetime):
|
||||
dt2 = datetime.datetime.fromordinal(dt2.toordinal())
|
||||
|
||||
self.years = 0
|
||||
self.months = 0
|
||||
self.days = 0
|
||||
self.leapdays = 0
|
||||
self.hours = 0
|
||||
self.minutes = 0
|
||||
self.seconds = 0
|
||||
self.microseconds = 0
|
||||
self.year = None
|
||||
self.month = None
|
||||
self.day = None
|
||||
self.weekday = None
|
||||
self.hour = None
|
||||
self.minute = None
|
||||
self.second = None
|
||||
self.microsecond = None
|
||||
self._has_time = 0
|
||||
|
||||
# Get year / month delta between the two
|
||||
months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month)
|
||||
self._set_months(months)
|
||||
|
||||
# Remove the year/month delta so the timedelta is just well-defined
|
||||
# time units (seconds, days and microseconds)
|
||||
dtm = self.__radd__(dt2)
|
||||
|
||||
# If we've overshot our target, make an adjustment
|
||||
if dt1 < dt2:
|
||||
compare = operator.gt
|
||||
increment = 1
|
||||
else:
|
||||
compare = operator.lt
|
||||
increment = -1
|
||||
|
||||
while compare(dt1, dtm):
|
||||
months += increment
|
||||
self._set_months(months)
|
||||
dtm = self.__radd__(dt2)
|
||||
|
||||
# Get the timedelta between the "months-adjusted" date and dt1
|
||||
delta = dt1 - dtm
|
||||
self.seconds = delta.seconds + delta.days * 86400
|
||||
self.microseconds = delta.microseconds
|
||||
else:
|
||||
# Check for non-integer values in integer-only quantities
|
||||
if any(x is not None and x != int(x) for x in (years, months)):
|
||||
raise ValueError("Non-integer years and months are "
|
||||
"ambiguous and not currently supported.")
|
||||
|
||||
# Relative information
|
||||
self.years = int(years)
|
||||
self.months = int(months)
|
||||
self.days = days + weeks * 7
|
||||
self.leapdays = leapdays
|
||||
self.hours = hours
|
||||
self.minutes = minutes
|
||||
self.seconds = seconds
|
||||
self.microseconds = microseconds
|
||||
|
||||
# Absolute information
|
||||
self.year = year
|
||||
self.month = month
|
||||
self.day = day
|
||||
self.hour = hour
|
||||
self.minute = minute
|
||||
self.second = second
|
||||
self.microsecond = microsecond
|
||||
|
||||
if any(x is not None and int(x) != x
|
||||
for x in (year, month, day, hour,
|
||||
minute, second, microsecond)):
|
||||
# For now we'll deprecate floats - later it'll be an error.
|
||||
warn("Non-integer value passed as absolute information. " +
|
||||
"This is not a well-defined condition and will raise " +
|
||||
"errors in future versions.", DeprecationWarning)
|
||||
|
||||
if isinstance(weekday, integer_types):
|
||||
self.weekday = weekdays[weekday]
|
||||
else:
|
||||
self.weekday = weekday
|
||||
|
||||
yday = 0
|
||||
if nlyearday:
|
||||
yday = nlyearday
|
||||
elif yearday:
|
||||
yday = yearday
|
||||
if yearday > 59:
|
||||
self.leapdays = -1
|
||||
if yday:
|
||||
ydayidx = [31, 59, 90, 120, 151, 181, 212,
|
||||
243, 273, 304, 334, 366]
|
||||
for idx, ydays in enumerate(ydayidx):
|
||||
if yday <= ydays:
|
||||
self.month = idx+1
|
||||
if idx == 0:
|
||||
self.day = yday
|
||||
else:
|
||||
self.day = yday-ydayidx[idx-1]
|
||||
break
|
||||
else:
|
||||
raise ValueError("invalid year day (%d)" % yday)
|
||||
|
||||
self._fix()
|
||||
|
||||
def _fix(self):
|
||||
if abs(self.microseconds) > 999999:
|
||||
s = _sign(self.microseconds)
|
||||
div, mod = divmod(self.microseconds * s, 1000000)
|
||||
self.microseconds = mod * s
|
||||
self.seconds += div * s
|
||||
if abs(self.seconds) > 59:
|
||||
s = _sign(self.seconds)
|
||||
div, mod = divmod(self.seconds * s, 60)
|
||||
self.seconds = mod * s
|
||||
self.minutes += div * s
|
||||
if abs(self.minutes) > 59:
|
||||
s = _sign(self.minutes)
|
||||
div, mod = divmod(self.minutes * s, 60)
|
||||
self.minutes = mod * s
|
||||
self.hours += div * s
|
||||
if abs(self.hours) > 23:
|
||||
s = _sign(self.hours)
|
||||
div, mod = divmod(self.hours * s, 24)
|
||||
self.hours = mod * s
|
||||
self.days += div * s
|
||||
if abs(self.months) > 11:
|
||||
s = _sign(self.months)
|
||||
div, mod = divmod(self.months * s, 12)
|
||||
self.months = mod * s
|
||||
self.years += div * s
|
||||
if (self.hours or self.minutes or self.seconds or self.microseconds
|
||||
or self.hour is not None or self.minute is not None or
|
||||
self.second is not None or self.microsecond is not None):
|
||||
self._has_time = 1
|
||||
else:
|
||||
self._has_time = 0
|
||||
|
||||
@property
|
||||
def weeks(self):
|
||||
return int(self.days / 7.0)
|
||||
|
||||
@weeks.setter
|
||||
def weeks(self, value):
|
||||
self.days = self.days - (self.weeks * 7) + value * 7
|
||||
|
||||
def _set_months(self, months):
|
||||
self.months = months
|
||||
if abs(self.months) > 11:
|
||||
s = _sign(self.months)
|
||||
div, mod = divmod(self.months * s, 12)
|
||||
self.months = mod * s
|
||||
self.years = div * s
|
||||
else:
|
||||
self.years = 0
|
||||
|
||||
def normalized(self):
|
||||
"""
|
||||
Return a version of this object represented entirely using integer
|
||||
values for the relative attributes.
|
||||
|
||||
>>> relativedelta(days=1.5, hours=2).normalized()
|
||||
relativedelta(days=+1, hours=+14)
|
||||
|
||||
:return:
|
||||
Returns a :class:`dateutil.relativedelta.relativedelta` object.
|
||||
"""
|
||||
# Cascade remainders down (rounding each to roughly nearest microsecond)
|
||||
days = int(self.days)
|
||||
|
||||
hours_f = round(self.hours + 24 * (self.days - days), 11)
|
||||
hours = int(hours_f)
|
||||
|
||||
minutes_f = round(self.minutes + 60 * (hours_f - hours), 10)
|
||||
minutes = int(minutes_f)
|
||||
|
||||
seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8)
|
||||
seconds = int(seconds_f)
|
||||
|
||||
microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds))
|
||||
|
||||
# Constructor carries overflow back up with call to _fix()
|
||||
return self.__class__(years=self.years, months=self.months,
|
||||
days=days, hours=hours, minutes=minutes,
|
||||
seconds=seconds, microseconds=microseconds,
|
||||
leapdays=self.leapdays, year=self.year,
|
||||
month=self.month, day=self.day,
|
||||
weekday=self.weekday, hour=self.hour,
|
||||
minute=self.minute, second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
def __add__(self, other):
|
||||
if isinstance(other, relativedelta):
|
||||
return self.__class__(years=other.years + self.years,
|
||||
months=other.months + self.months,
|
||||
days=other.days + self.days,
|
||||
hours=other.hours + self.hours,
|
||||
minutes=other.minutes + self.minutes,
|
||||
seconds=other.seconds + self.seconds,
|
||||
microseconds=(other.microseconds +
|
||||
self.microseconds),
|
||||
leapdays=other.leapdays or self.leapdays,
|
||||
year=(other.year if other.year is not None
|
||||
else self.year),
|
||||
month=(other.month if other.month is not None
|
||||
else self.month),
|
||||
day=(other.day if other.day is not None
|
||||
else self.day),
|
||||
weekday=(other.weekday if other.weekday is not None
|
||||
else self.weekday),
|
||||
hour=(other.hour if other.hour is not None
|
||||
else self.hour),
|
||||
minute=(other.minute if other.minute is not None
|
||||
else self.minute),
|
||||
second=(other.second if other.second is not None
|
||||
else self.second),
|
||||
microsecond=(other.microsecond if other.microsecond
|
||||
is not None else
|
||||
self.microsecond))
|
||||
if isinstance(other, datetime.timedelta):
|
||||
return self.__class__(years=self.years,
|
||||
months=self.months,
|
||||
days=self.days + other.days,
|
||||
hours=self.hours,
|
||||
minutes=self.minutes,
|
||||
seconds=self.seconds + other.seconds,
|
||||
microseconds=self.microseconds + other.microseconds,
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
if not isinstance(other, datetime.date):
|
||||
return NotImplemented
|
||||
elif self._has_time and not isinstance(other, datetime.datetime):
|
||||
other = datetime.datetime.fromordinal(other.toordinal())
|
||||
year = (self.year or other.year)+self.years
|
||||
month = self.month or other.month
|
||||
if self.months:
|
||||
assert 1 <= abs(self.months) <= 12
|
||||
month += self.months
|
||||
if month > 12:
|
||||
year += 1
|
||||
month -= 12
|
||||
elif month < 1:
|
||||
year -= 1
|
||||
month += 12
|
||||
day = min(calendar.monthrange(year, month)[1],
|
||||
self.day or other.day)
|
||||
repl = {"year": year, "month": month, "day": day}
|
||||
for attr in ["hour", "minute", "second", "microsecond"]:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
repl[attr] = value
|
||||
days = self.days
|
||||
if self.leapdays and month > 2 and calendar.isleap(year):
|
||||
days += self.leapdays
|
||||
ret = (other.replace(**repl)
|
||||
+ datetime.timedelta(days=days,
|
||||
hours=self.hours,
|
||||
minutes=self.minutes,
|
||||
seconds=self.seconds,
|
||||
microseconds=self.microseconds))
|
||||
if self.weekday:
|
||||
weekday, nth = self.weekday.weekday, self.weekday.n or 1
|
||||
jumpdays = (abs(nth) - 1) * 7
|
||||
if nth > 0:
|
||||
jumpdays += (7 - ret.weekday() + weekday) % 7
|
||||
else:
|
||||
jumpdays += (ret.weekday() - weekday) % 7
|
||||
jumpdays *= -1
|
||||
ret += datetime.timedelta(days=jumpdays)
|
||||
return ret
|
||||
|
||||
def __radd__(self, other):
|
||||
return self.__add__(other)
|
||||
|
||||
def __rsub__(self, other):
|
||||
return self.__neg__().__radd__(other)
|
||||
|
||||
def __sub__(self, other):
|
||||
if not isinstance(other, relativedelta):
|
||||
return NotImplemented # In case the other object defines __rsub__
|
||||
return self.__class__(years=self.years - other.years,
|
||||
months=self.months - other.months,
|
||||
days=self.days - other.days,
|
||||
hours=self.hours - other.hours,
|
||||
minutes=self.minutes - other.minutes,
|
||||
seconds=self.seconds - other.seconds,
|
||||
microseconds=self.microseconds - other.microseconds,
|
||||
leapdays=self.leapdays or other.leapdays,
|
||||
year=(self.year if self.year is not None
|
||||
else other.year),
|
||||
month=(self.month if self.month is not None else
|
||||
other.month),
|
||||
day=(self.day if self.day is not None else
|
||||
other.day),
|
||||
weekday=(self.weekday if self.weekday is not None else
|
||||
other.weekday),
|
||||
hour=(self.hour if self.hour is not None else
|
||||
other.hour),
|
||||
                              minute=(self.minute if self.minute is not None else
                                      other.minute),
                              second=(self.second if self.second is not None else
                                      other.second),
                              microsecond=(self.microsecond if self.microsecond
                                           is not None else
                                           other.microsecond))

    def __abs__(self):
        return self.__class__(years=abs(self.years),
                              months=abs(self.months),
                              days=abs(self.days),
                              hours=abs(self.hours),
                              minutes=abs(self.minutes),
                              seconds=abs(self.seconds),
                              microseconds=abs(self.microseconds),
                              leapdays=self.leapdays,
                              year=self.year,
                              month=self.month,
                              day=self.day,
                              weekday=self.weekday,
                              hour=self.hour,
                              minute=self.minute,
                              second=self.second,
                              microsecond=self.microsecond)

    def __neg__(self):
        return self.__class__(years=-self.years,
                              months=-self.months,
                              days=-self.days,
                              hours=-self.hours,
                              minutes=-self.minutes,
                              seconds=-self.seconds,
                              microseconds=-self.microseconds,
                              leapdays=self.leapdays,
                              year=self.year,
                              month=self.month,
                              day=self.day,
                              weekday=self.weekday,
                              hour=self.hour,
                              minute=self.minute,
                              second=self.second,
                              microsecond=self.microsecond)

    def __bool__(self):
        return not (not self.years and
                    not self.months and
                    not self.days and
                    not self.hours and
                    not self.minutes and
                    not self.seconds and
                    not self.microseconds and
                    not self.leapdays and
                    self.year is None and
                    self.month is None and
                    self.day is None and
                    self.weekday is None and
                    self.hour is None and
                    self.minute is None and
                    self.second is None and
                    self.microsecond is None)
    # Compatibility with Python 2.x
    __nonzero__ = __bool__

    def __mul__(self, other):
        try:
            f = float(other)
        except TypeError:
            return NotImplemented

        return self.__class__(years=int(self.years * f),
                              months=int(self.months * f),
                              days=int(self.days * f),
                              hours=int(self.hours * f),
                              minutes=int(self.minutes * f),
                              seconds=int(self.seconds * f),
                              microseconds=int(self.microseconds * f),
                              leapdays=self.leapdays,
                              year=self.year,
                              month=self.month,
                              day=self.day,
                              weekday=self.weekday,
                              hour=self.hour,
                              minute=self.minute,
                              second=self.second,
                              microsecond=self.microsecond)

    __rmul__ = __mul__

    def __eq__(self, other):
        if not isinstance(other, relativedelta):
            return NotImplemented
        if self.weekday or other.weekday:
            if not self.weekday or not other.weekday:
                return False
            if self.weekday.weekday != other.weekday.weekday:
                return False
            n1, n2 = self.weekday.n, other.weekday.n
            if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
                return False
        return (self.years == other.years and
                self.months == other.months and
                self.days == other.days and
                self.hours == other.hours and
                self.minutes == other.minutes and
                self.seconds == other.seconds and
                self.microseconds == other.microseconds and
                self.leapdays == other.leapdays and
                self.year == other.year and
                self.month == other.month and
                self.day == other.day and
                self.hour == other.hour and
                self.minute == other.minute and
                self.second == other.second and
                self.microsecond == other.microsecond)

    def __hash__(self):
        return hash((
            self.weekday,
            self.years,
            self.months,
            self.days,
            self.hours,
            self.minutes,
            self.seconds,
            self.microseconds,
            self.leapdays,
            self.year,
            self.month,
            self.day,
            self.hour,
            self.minute,
            self.second,
            self.microsecond,
        ))

    def __ne__(self, other):
        return not self.__eq__(other)

    def __div__(self, other):
        try:
            reciprocal = 1 / float(other)
        except TypeError:
            return NotImplemented

        return self.__mul__(reciprocal)

    __truediv__ = __div__

    def __repr__(self):
        l = []
        for attr in ["years", "months", "days", "leapdays",
                     "hours", "minutes", "seconds", "microseconds"]:
            value = getattr(self, attr)
            if value:
                l.append("{attr}={value:+g}".format(attr=attr, value=value))
        for attr in ["year", "month", "day", "weekday",
                     "hour", "minute", "second", "microsecond"]:
            value = getattr(self, attr)
            if value is not None:
                l.append("{attr}={value}".format(attr=attr, value=repr(value)))
        return "{classname}({attrs})".format(classname=self.__class__.__name__,
                                             attrs=", ".join(l))


def _sign(x):
    return int(copysign(1, x))

# vim:ts=4:sw=4:et
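A minimal sketch of how the operators above behave in practice (assuming python-dateutil is importable from this vendored tree; values shown are illustrative):

# Illustrative use of the relativedelta operators defined above.
from datetime import datetime
from dateutil.relativedelta import relativedelta

rd = relativedelta(months=-3, days=10)
print(abs(rd))          # relativedelta(months=+3, days=+10)
print(-rd)              # relativedelta(months=+3, days=-10)
print(rd * 2)           # relativedelta(months=-6, days=+20)
print(rd / 2)           # relativedelta(months=-1, days=+5), truncated toward zero
print(datetime(2024, 1, 31) + rd)  # 2023-11-10 00:00:00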
1737
tapdown/lib/python3.11/site-packages/dateutil/rrule.py
Normal file
1737
tapdown/lib/python3.11/site-packages/dateutil/rrule.py
Normal file
File diff suppressed because it is too large
12
tapdown/lib/python3.11/site-packages/dateutil/tz/__init__.py
Normal file
12
tapdown/lib/python3.11/site-packages/dateutil/tz/__init__.py
Normal file
@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
from .tz import *
from .tz import __doc__

__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
           "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz",
           "enfold", "datetime_ambiguous", "datetime_exists",
           "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"]


class DeprecatedTzFormatWarning(Warning):
    """Warning raised when time zones are parsed from deprecated formats."""
419
tapdown/lib/python3.11/site-packages/dateutil/tz/_common.py
Normal file
419
tapdown/lib/python3.11/site-packages/dateutil/tz/_common.py
Normal file
@@ -0,0 +1,419 @@
from six import PY2

from functools import wraps

from datetime import datetime, timedelta, tzinfo


ZERO = timedelta(0)

__all__ = ['tzname_in_python2', 'enfold']


def tzname_in_python2(namefunc):
    """Change unicode output into bytestrings in Python 2

    tzname() API changed in Python 3. It used to return bytes, but was changed
    to unicode strings
    """
    if PY2:
        @wraps(namefunc)
        def adjust_encoding(*args, **kwargs):
            name = namefunc(*args, **kwargs)
            if name is not None:
                name = name.encode()

            return name

        return adjust_encoding
    else:
        return namefunc


# The following is adapted from Alexander Belopolsky's tz library
# https://github.com/abalkin/tz
if hasattr(datetime, 'fold'):
    # This is the pre-python 3.6 fold situation
    def enfold(dt, fold=1):
        """
        Provides a unified interface for assigning the ``fold`` attribute to
        datetimes both before and after the implementation of PEP-495.

        :param fold:
            The value for the ``fold`` attribute in the returned datetime. This
            should be either 0 or 1.

        :return:
            Returns an object for which ``getattr(dt, 'fold', 0)`` returns
            ``fold`` for all versions of Python. In versions prior to
            Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
            subclass of :py:class:`datetime.datetime` with the ``fold``
            attribute added, if ``fold`` is 1.

        .. versionadded:: 2.6.0
        """
        return dt.replace(fold=fold)

else:
    class _DatetimeWithFold(datetime):
        """
        This is a class designed to provide a PEP 495-compliant interface for
        Python versions before 3.6. It is used only for dates in a fold, so
        the ``fold`` attribute is fixed at ``1``.

        .. versionadded:: 2.6.0
        """
        __slots__ = ()

        def replace(self, *args, **kwargs):
            """
            Return a datetime with the same attributes, except for those
            attributes given new values by whichever keyword arguments are
            specified. Note that tzinfo=None can be specified to create a naive
            datetime from an aware datetime with no conversion of date and time
            data.

            This is reimplemented in ``_DatetimeWithFold`` because pypy3 will
            return a ``datetime.datetime`` even if ``fold`` is unchanged.
            """
            argnames = (
                'year', 'month', 'day', 'hour', 'minute', 'second',
                'microsecond', 'tzinfo'
            )

            for arg, argname in zip(args, argnames):
                if argname in kwargs:
                    raise TypeError('Duplicate argument: {}'.format(argname))

                kwargs[argname] = arg

            for argname in argnames:
                if argname not in kwargs:
                    kwargs[argname] = getattr(self, argname)

            dt_class = self.__class__ if kwargs.get('fold', 1) else datetime

            return dt_class(**kwargs)

        @property
        def fold(self):
            return 1

    def enfold(dt, fold=1):
        """
        Provides a unified interface for assigning the ``fold`` attribute to
        datetimes both before and after the implementation of PEP-495.

        :param fold:
            The value for the ``fold`` attribute in the returned datetime. This
            should be either 0 or 1.

        :return:
            Returns an object for which ``getattr(dt, 'fold', 0)`` returns
            ``fold`` for all versions of Python. In versions prior to
            Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
            subclass of :py:class:`datetime.datetime` with the ``fold``
            attribute added, if ``fold`` is 1.

        .. versionadded:: 2.6.0
        """
        if getattr(dt, 'fold', 0) == fold:
            return dt

        args = dt.timetuple()[:6]
        args += (dt.microsecond, dt.tzinfo)

        if fold:
            return _DatetimeWithFold(*args)
        else:
            return datetime(*args)


def _validate_fromutc_inputs(f):
    """
    The CPython version of ``fromutc`` checks that the input is a ``datetime``
    object and that ``self`` is attached as its ``tzinfo``.
    """
    @wraps(f)
    def fromutc(self, dt):
        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        return f(self, dt)

    return fromutc


class _tzinfo(tzinfo):
    """
    Base class for all ``dateutil`` ``tzinfo`` objects.
    """

    def is_ambiguous(self, dt):
        """
        Whether or not the "wall time" of a given datetime is ambiguous in this
        zone.

        :param dt:
            A :py:class:`datetime.datetime`, naive or time zone aware.

        :return:
            Returns ``True`` if ambiguous, ``False`` otherwise.

        .. versionadded:: 2.6.0
        """

        dt = dt.replace(tzinfo=self)

        wall_0 = enfold(dt, fold=0)
        wall_1 = enfold(dt, fold=1)

        same_offset = wall_0.utcoffset() == wall_1.utcoffset()
        same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None)

        return same_dt and not same_offset

    def _fold_status(self, dt_utc, dt_wall):
        """
        Determine the fold status of a "wall" datetime, given a representation
        of the same datetime as a (naive) UTC datetime. This is calculated based
        on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all
        datetimes, and that this offset is the actual number of hours separating
        ``dt_utc`` and ``dt_wall``.

        :param dt_utc:
            Representation of the datetime as UTC

        :param dt_wall:
            Representation of the datetime as "wall time". This parameter must
            either have a `fold` attribute or have a fold-naive
            :class:`datetime.tzinfo` attached, otherwise the calculation may
            fail.
        """
        if self.is_ambiguous(dt_wall):
            delta_wall = dt_wall - dt_utc
            _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst()))
        else:
            _fold = 0

        return _fold

    def _fold(self, dt):
        return getattr(dt, 'fold', 0)

    def _fromutc(self, dt):
        """
        Given a timezone-aware datetime in a given timezone, calculates a
        timezone-aware datetime in a new timezone.

        Since this is the one time that we *know* we have an unambiguous
        datetime object, we take this opportunity to determine whether the
        datetime is ambiguous and in a "fold" state (e.g. if it's the first
        occurrence, chronologically, of the ambiguous datetime).

        :param dt:
            A timezone-aware :class:`datetime.datetime` object.
        """

        # Re-implement the algorithm from Python's datetime.py
        dtoff = dt.utcoffset()
        if dtoff is None:
            raise ValueError("fromutc() requires a non-None utcoffset() "
                             "result")

        # The original datetime.py code assumes that `dst()` defaults to
        # zero during ambiguous times. PEP 495 inverts this presumption, so
        # for pre-PEP 495 versions of python, we need to tweak the algorithm.
        dtdst = dt.dst()
        if dtdst is None:
            raise ValueError("fromutc() requires a non-None dst() result")
        delta = dtoff - dtdst

        dt += delta
        # Set fold=1 so we can default to being in the fold for
        # ambiguous dates.
        dtdst = enfold(dt, fold=1).dst()
        if dtdst is None:
            raise ValueError("fromutc(): dt.dst gave inconsistent "
                             "results; cannot convert")
        return dt + dtdst

    @_validate_fromutc_inputs
    def fromutc(self, dt):
        """
        Given a timezone-aware datetime in a given timezone, calculates a
        timezone-aware datetime in a new timezone.

        Since this is the one time that we *know* we have an unambiguous
        datetime object, we take this opportunity to determine whether the
        datetime is ambiguous and in a "fold" state (e.g. if it's the first
        occurrence, chronologically, of the ambiguous datetime).

        :param dt:
            A timezone-aware :class:`datetime.datetime` object.
        """
        dt_wall = self._fromutc(dt)

        # Calculate the fold status given the two datetimes.
        _fold = self._fold_status(dt, dt_wall)

        # Set the default fold value for ambiguous dates
        return enfold(dt_wall, fold=_fold)


class tzrangebase(_tzinfo):
    """
    This is an abstract base class for time zones represented by an annual
    transition into and out of DST. Child classes should implement the following
    methods:

        * ``__init__(self, *args, **kwargs)``
        * ``transitions(self, year)`` - this is expected to return a tuple of
          datetimes representing the DST on and off transitions in standard
          time.

    A fully initialized ``tzrangebase`` subclass should also provide the
    following attributes:
        * ``hasdst``: Boolean whether or not the zone uses DST.
        * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects
          representing the respective UTC offsets.
        * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short
          abbreviations in DST and STD, respectively.
        * ``_hasdst``: Whether or not the zone has DST.

    .. versionadded:: 2.6.0
    """
    def __init__(self):
        raise NotImplementedError('tzrangebase is an abstract base class')

    def utcoffset(self, dt):
        isdst = self._isdst(dt)

        if isdst is None:
            return None
        elif isdst:
            return self._dst_offset
        else:
            return self._std_offset

    def dst(self, dt):
        isdst = self._isdst(dt)

        if isdst is None:
            return None
        elif isdst:
            return self._dst_base_offset
        else:
            return ZERO

    @tzname_in_python2
    def tzname(self, dt):
        if self._isdst(dt):
            return self._dst_abbr
        else:
            return self._std_abbr

    def fromutc(self, dt):
        """ Given a datetime in UTC, return local time """
        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")

        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        # Get transitions - if there are none, fixed offset
        transitions = self.transitions(dt.year)
        if transitions is None:
            return dt + self.utcoffset(dt)

        # Get the transition times in UTC
        dston, dstoff = transitions

        dston -= self._std_offset
        dstoff -= self._std_offset

        utc_transitions = (dston, dstoff)
        dt_utc = dt.replace(tzinfo=None)

        isdst = self._naive_isdst(dt_utc, utc_transitions)

        if isdst:
            dt_wall = dt + self._dst_offset
        else:
            dt_wall = dt + self._std_offset

        _fold = int(not isdst and self.is_ambiguous(dt_wall))

        return enfold(dt_wall, fold=_fold)

    def is_ambiguous(self, dt):
        """
        Whether or not the "wall time" of a given datetime is ambiguous in this
        zone.

        :param dt:
            A :py:class:`datetime.datetime`, naive or time zone aware.

        :return:
            Returns ``True`` if ambiguous, ``False`` otherwise.

        .. versionadded:: 2.6.0
        """
        if not self.hasdst:
            return False

        start, end = self.transitions(dt.year)

        dt = dt.replace(tzinfo=None)
        return (end <= dt < end + self._dst_base_offset)

    def _isdst(self, dt):
        if not self.hasdst:
            return False
        elif dt is None:
            return None

        transitions = self.transitions(dt.year)

        if transitions is None:
            return False

        dt = dt.replace(tzinfo=None)

        isdst = self._naive_isdst(dt, transitions)

        # Handle ambiguous dates
        if not isdst and self.is_ambiguous(dt):
            return not self._fold(dt)
        else:
            return isdst

    def _naive_isdst(self, dt, transitions):
        dston, dstoff = transitions

        dt = dt.replace(tzinfo=None)

        if dston < dstoff:
            isdst = dston <= dt < dstoff
        else:
            isdst = not dstoff <= dt < dston

        return isdst

    @property
    def _dst_base_offset(self):
        return self._dst_offset - self._std_offset

    __hash__ = None

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        return "%s(...)" % self.__class__.__name__

    __reduce__ = object.__reduce__
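To make the fold handling above concrete, a small hedged example (it assumes the IANA zone America/New_York is resolvable via dateutil.tz.gettz on this system):

# Sketch: how enfold() disambiguates a repeated wall time.
from datetime import datetime
from dateutil import tz

eastern = tz.gettz("America/New_York")
ambiguous = datetime(2023, 11, 5, 1, 30, tzinfo=eastern)  # this wall time occurs twice

first = tz.enfold(ambiguous, fold=0)   # first occurrence, still on daylight time
second = tz.enfold(ambiguous, fold=1)  # second occurrence, back on standard time
print(tz.datetime_ambiguous(ambiguous))      # True
print(first.tzname(), second.tzname())       # EDT EST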
@@ -0,0 +1,80 @@
from datetime import timedelta
import weakref
from collections import OrderedDict

from six.moves import _thread


class _TzSingleton(type):
    def __init__(cls, *args, **kwargs):
        cls.__instance = None
        super(_TzSingleton, cls).__init__(*args, **kwargs)

    def __call__(cls):
        if cls.__instance is None:
            cls.__instance = super(_TzSingleton, cls).__call__()
        return cls.__instance


class _TzFactory(type):
    def instance(cls, *args, **kwargs):
        """Alternate constructor that returns a fresh instance"""
        return type.__call__(cls, *args, **kwargs)


class _TzOffsetFactory(_TzFactory):
    def __init__(cls, *args, **kwargs):
        cls.__instances = weakref.WeakValueDictionary()
        cls.__strong_cache = OrderedDict()
        cls.__strong_cache_size = 8

        cls._cache_lock = _thread.allocate_lock()

    def __call__(cls, name, offset):
        if isinstance(offset, timedelta):
            key = (name, offset.total_seconds())
        else:
            key = (name, offset)

        instance = cls.__instances.get(key, None)
        if instance is None:
            instance = cls.__instances.setdefault(key,
                                                  cls.instance(name, offset))

        # This lock may not be necessary in Python 3. See GH issue #901
        with cls._cache_lock:
            cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance)

            # Remove an item if the strong cache is overpopulated
            if len(cls.__strong_cache) > cls.__strong_cache_size:
                cls.__strong_cache.popitem(last=False)

        return instance


class _TzStrFactory(_TzFactory):
    def __init__(cls, *args, **kwargs):
        cls.__instances = weakref.WeakValueDictionary()
        cls.__strong_cache = OrderedDict()
        cls.__strong_cache_size = 8

        cls.__cache_lock = _thread.allocate_lock()

    def __call__(cls, s, posix_offset=False):
        key = (s, posix_offset)
        instance = cls.__instances.get(key, None)

        if instance is None:
            instance = cls.__instances.setdefault(key,
                                                  cls.instance(s, posix_offset))

        # This lock may not be necessary in Python 3. See GH issue #901
        with cls.__cache_lock:
            cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance)

            # Remove an item if the strong cache is overpopulated
            if len(cls.__strong_cache) > cls.__strong_cache_size:
                cls.__strong_cache.popitem(last=False)

        return instance
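A short sketch of what these factory metaclasses buy the caller: equal constructor arguments are expected to return the same cached object (this assumes a dateutil version that routes tzoffset/tzstr construction through the factories above):

# Sketch: the weak/strong cache means repeated construction reuses one instance.
from dateutil import tz

a = tz.tzoffset("EST", -18000)
b = tz.tzoffset("EST", -18000)
print(a is b)                                       # True when the cache is in effect
print(tz.tzstr("EST5EDT") is tz.tzstr("EST5EDT"))   # likewise cached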
1849
tapdown/lib/python3.11/site-packages/dateutil/tz/tz.py
Normal file
1849
tapdown/lib/python3.11/site-packages/dateutil/tz/tz.py
Normal file
File diff suppressed because it is too large
370
tapdown/lib/python3.11/site-packages/dateutil/tz/win.py
Normal file
370
tapdown/lib/python3.11/site-packages/dateutil/tz/win.py
Normal file
@@ -0,0 +1,370 @@
# -*- coding: utf-8 -*-
"""
This module provides an interface to the native time zone data on Windows,
including :py:class:`datetime.tzinfo` implementations.

Attempting to import this module on a non-Windows platform will raise an
:py:obj:`ImportError`.
"""
# This code was originally contributed by Jeffrey Harris.
import datetime
import struct

from six.moves import winreg
from six import text_type

try:
    import ctypes
    from ctypes import wintypes
except ValueError:
    # ValueError is raised on non-Windows systems for some horrible reason.
    raise ImportError("Running tzwin on non-Windows system")

from ._common import tzrangebase

__all__ = ["tzwin", "tzwinlocal", "tzres"]

ONEWEEK = datetime.timedelta(7)

TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"


def _settzkeyname():
    handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    try:
        winreg.OpenKey(handle, TZKEYNAMENT).Close()
        TZKEYNAME = TZKEYNAMENT
    except WindowsError:
        TZKEYNAME = TZKEYNAME9X
    handle.Close()
    return TZKEYNAME


TZKEYNAME = _settzkeyname()


class tzres(object):
    """
    Class for accessing ``tzres.dll``, which contains timezone name related
    resources.

    .. versionadded:: 2.5.0
    """
    p_wchar = ctypes.POINTER(wintypes.WCHAR)  # Pointer to a wide char

    def __init__(self, tzres_loc='tzres.dll'):
        # Load the user32 DLL so we can load strings from tzres
        user32 = ctypes.WinDLL('user32')

        # Specify the LoadStringW function
        user32.LoadStringW.argtypes = (wintypes.HINSTANCE,
                                       wintypes.UINT,
                                       wintypes.LPWSTR,
                                       ctypes.c_int)

        self.LoadStringW = user32.LoadStringW
        self._tzres = ctypes.WinDLL(tzres_loc)
        self.tzres_loc = tzres_loc

    def load_name(self, offset):
        """
        Load a timezone name from a DLL offset (integer).

        >>> from dateutil.tzwin import tzres
        >>> tzr = tzres()
        >>> print(tzr.load_name(112))
        'Eastern Standard Time'

        :param offset:
            A positive integer value referring to a string from the tzres dll.

        .. note::

            Offsets found in the registry are generally of the form
            ``@tzres.dll,-114``. The offset in this case is 114, not -114.

        """
        resource = self.p_wchar()
        lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR)
        nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0)
        return resource[:nchar]

    def name_from_string(self, tzname_str):
        """
        Parse strings as returned from the Windows registry into the time zone
        name as defined in the registry.

        >>> from dateutil.tzwin import tzres
        >>> tzr = tzres()
        >>> print(tzr.name_from_string('@tzres.dll,-251'))
        'Dateline Daylight Time'
        >>> print(tzr.name_from_string('Eastern Standard Time'))
        'Eastern Standard Time'

        :param tzname_str:
            A timezone name string as returned from a Windows registry key.

        :return:
            Returns the localized timezone string from tzres.dll if the string
            is of the form `@tzres.dll,-offset`, else returns the input string.
        """
        if not tzname_str.startswith('@'):
            return tzname_str

        name_splt = tzname_str.split(',-')
        try:
            offset = int(name_splt[1])
        except:
            raise ValueError("Malformed timezone string.")

        return self.load_name(offset)


class tzwinbase(tzrangebase):
    """tzinfo class based on win32's timezones available in the registry."""
    def __init__(self):
        raise NotImplementedError('tzwinbase is an abstract base class')

    def __eq__(self, other):
        # Compare on all relevant dimensions, including name.
        if not isinstance(other, tzwinbase):
            return NotImplemented

        return (self._std_offset == other._std_offset and
                self._dst_offset == other._dst_offset and
                self._stddayofweek == other._stddayofweek and
                self._dstdayofweek == other._dstdayofweek and
                self._stdweeknumber == other._stdweeknumber and
                self._dstweeknumber == other._dstweeknumber and
                self._stdhour == other._stdhour and
                self._dsthour == other._dsthour and
                self._stdminute == other._stdminute and
                self._dstminute == other._dstminute and
                self._std_abbr == other._std_abbr and
                self._dst_abbr == other._dst_abbr)

    @staticmethod
    def list():
        """Return a list of all time zones known to the system."""
        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            with winreg.OpenKey(handle, TZKEYNAME) as tzkey:
                result = [winreg.EnumKey(tzkey, i)
                          for i in range(winreg.QueryInfoKey(tzkey)[0])]
        return result

    def display(self):
        """
        Return the display name of the time zone.
        """
        return self._display

    def transitions(self, year):
        """
        For a given year, get the DST on and off transition times, expressed
        always on the standard time side. For zones with no transitions, this
        function returns ``None``.

        :param year:
            The year whose transitions you would like to query.

        :return:
            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
            ``(dston, dstoff)`` for zones with an annual DST transition, or
            ``None`` for fixed offset zones.
        """

        if not self.hasdst:
            return None

        dston = picknthweekday(year, self._dstmonth, self._dstdayofweek,
                               self._dsthour, self._dstminute,
                               self._dstweeknumber)

        dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek,
                                self._stdhour, self._stdminute,
                                self._stdweeknumber)

        # Ambiguous dates default to the STD side
        dstoff -= self._dst_base_offset

        return dston, dstoff

    def _get_hasdst(self):
        return self._dstmonth != 0

    @property
    def _dst_base_offset(self):
        return self._dst_base_offset_


class tzwin(tzwinbase):
    """
    Time zone object created from the zone info in the Windows registry

    These are similar to :py:class:`dateutil.tz.tzrange` objects in that
    the time zone data is provided in the format of a single offset rule
    for either 0 or 2 time zone transitions per year.

    :param: name
        The name of a Windows time zone key, e.g. "Eastern Standard Time".
        The full list of keys can be retrieved with :func:`tzwin.list`.
    """

    def __init__(self, name):
        self._name = name

        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name)
            with winreg.OpenKey(handle, tzkeyname) as tzkey:
                keydict = valuestodict(tzkey)

        self._std_abbr = keydict["Std"]
        self._dst_abbr = keydict["Dlt"]

        self._display = keydict["Display"]

        # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
        tup = struct.unpack("=3l16h", keydict["TZI"])
        stdoffset = -tup[0]-tup[1]    # Bias + StandardBias * -1
        dstoffset = stdoffset-tup[2]  # + DaylightBias * -1
        self._std_offset = datetime.timedelta(minutes=stdoffset)
        self._dst_offset = datetime.timedelta(minutes=dstoffset)

        # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
        # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
        (self._stdmonth,
         self._stddayofweek,   # Sunday = 0
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[4:9]

        (self._dstmonth,
         self._dstdayofweek,   # Sunday = 0
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[12:17]

        self._dst_base_offset_ = self._dst_offset - self._std_offset
        self.hasdst = self._get_hasdst()

    def __repr__(self):
        return "tzwin(%s)" % repr(self._name)

    def __reduce__(self):
        return (self.__class__, (self._name,))


class tzwinlocal(tzwinbase):
    """
    Class representing the local time zone information in the Windows registry

    While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time`
    module) to retrieve time zone information, ``tzwinlocal`` retrieves the
    rules directly from the Windows registry and creates an object like
    :class:`dateutil.tz.tzwin`.

    Because Windows does not have an equivalent of :func:`time.tzset`, on
    Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the
    time zone settings *at the time that the process was started*, meaning
    changes to the machine's time zone settings during the run of a program
    on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`.
    Because ``tzwinlocal`` reads the registry directly, it is unaffected by
    this issue.
    """
    def __init__(self):
        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
                keydict = valuestodict(tzlocalkey)

            self._std_abbr = keydict["StandardName"]
            self._dst_abbr = keydict["DaylightName"]

            try:
                tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME,
                                                           sn=self._std_abbr)
                with winreg.OpenKey(handle, tzkeyname) as tzkey:
                    _keydict = valuestodict(tzkey)
                    self._display = _keydict["Display"]
            except OSError:
                self._display = None

        stdoffset = -keydict["Bias"]-keydict["StandardBias"]
        dstoffset = stdoffset-keydict["DaylightBias"]

        self._std_offset = datetime.timedelta(minutes=stdoffset)
        self._dst_offset = datetime.timedelta(minutes=dstoffset)

        # For reasons unclear, in this particular key, the day of week has been
        # moved to the END of the SYSTEMTIME structure.
        tup = struct.unpack("=8h", keydict["StandardStart"])

        (self._stdmonth,
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[1:5]

        self._stddayofweek = tup[7]

        tup = struct.unpack("=8h", keydict["DaylightStart"])

        (self._dstmonth,
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[1:5]

        self._dstdayofweek = tup[7]

        self._dst_base_offset_ = self._dst_offset - self._std_offset
        self.hasdst = self._get_hasdst()

    def __repr__(self):
        return "tzwinlocal()"

    def __str__(self):
        # str will return the standard name, not the daylight name.
        return "tzwinlocal(%s)" % repr(self._std_abbr)

    def __reduce__(self):
        return (self.__class__, ())


def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    """ dayofweek == 0 means Sunday, whichweek 5 means last instance """
    first = datetime.datetime(year, month, 1, hour, minute)

    # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6),
    # Because 7 % 7 = 0
    weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)
    wd = weekdayone + ((whichweek - 1) * ONEWEEK)
    if (wd.month != month):
        wd -= ONEWEEK

    return wd


def valuestodict(key):
    """Convert a registry key's values to a dictionary."""
    dout = {}
    size = winreg.QueryInfoKey(key)[1]
    tz_res = None

    for i in range(size):
        key_name, value, dtype = winreg.EnumValue(key, i)
        if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN:
            # If it's a DWORD (32-bit integer), it's stored as unsigned - convert
            # that to a proper signed integer
            if value & (1 << 31):
                value = value - (1 << 32)
        elif dtype == winreg.REG_SZ:
            # If it's a reference to the tzres DLL, load the actual string
            if value.startswith('@tzres'):
                tz_res = tz_res or tzres()
                value = tz_res.name_from_string(value)

            value = value.rstrip('\x00')  # Remove trailing nulls

        dout[key_name] = value

    return dout
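As an illustration of the kind of rule picknthweekday() resolves from the registry fields, a quick pure-standard-library check (no Windows registry needed; the function below mirrors the logic above for demonstration only):

# Sketch: the "second Sunday of March" rule shape used by Windows transition data.
import datetime

def nth_weekday(year, month, dayofweek, whichweek, hour=2, minute=0):
    # dayofweek == 0 means Sunday, whichweek == 5 means "last occurrence"
    first = datetime.datetime(year, month, 1, hour, minute)
    weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)
    wd = weekdayone + (whichweek - 1) * datetime.timedelta(7)
    if wd.month != month:
        wd -= datetime.timedelta(7)
    return wd

print(nth_weekday(2024, 3, 0, 2))  # 2024-03-10 02:00:00 (second Sunday, US DST start)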
2
tapdown/lib/python3.11/site-packages/dateutil/tzwin.py
Normal file
2
tapdown/lib/python3.11/site-packages/dateutil/tzwin.py
Normal file
@@ -0,0 +1,2 @@
# tzwin has moved to dateutil.tz.win
from .tz.win import *
71
tapdown/lib/python3.11/site-packages/dateutil/utils.py
Normal file
71
tapdown/lib/python3.11/site-packages/dateutil/utils.py
Normal file
@@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
"""
This module offers general convenience and utility functions for dealing with
datetimes.

.. versionadded:: 2.7.0
"""
from __future__ import unicode_literals

from datetime import datetime, time


def today(tzinfo=None):
    """
    Returns a :py:class:`datetime` representing the current day at midnight

    :param tzinfo:
        The time zone to attach (also used to determine the current day).

    :return:
        A :py:class:`datetime.datetime` object representing the current day
        at midnight.
    """

    dt = datetime.now(tzinfo)
    return datetime.combine(dt.date(), time(0, tzinfo=tzinfo))


def default_tzinfo(dt, tzinfo):
    """
    Sets the ``tzinfo`` parameter on naive datetimes only

    This is useful for example when you are provided a datetime that may have
    either an implicit or explicit time zone, such as when parsing a time zone
    string.

    .. doctest::

        >>> from dateutil.tz import tzoffset
        >>> from dateutil.parser import parse
        >>> from dateutil.utils import default_tzinfo
        >>> dflt_tz = tzoffset("EST", -18000)
        >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz))
        2014-01-01 12:30:00+00:00
        >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz))
        2014-01-01 12:30:00-05:00

    :param dt:
        The datetime on which to replace the time zone

    :param tzinfo:
        The :py:class:`datetime.tzinfo` subclass instance to assign to
        ``dt`` if (and only if) it is naive.

    :return:
        Returns an aware :py:class:`datetime.datetime`.
    """
    if dt.tzinfo is not None:
        return dt
    else:
        return dt.replace(tzinfo=tzinfo)


def within_delta(dt1, dt2, delta):
    """
    Useful for comparing two datetimes that may have a negligible difference
    to be considered equal.
    """
    delta = abs(delta)
    difference = dt1 - dt2
    return -delta <= difference <= delta
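For instance, a small sketch of the within_delta helper above (dateutil.utils is part of the vendored package):

# Sketch: comparing near-equal timestamps with within_delta().
from datetime import datetime, timedelta
from dateutil.utils import within_delta

a = datetime(2024, 1, 1, 12, 0, 0)
b = datetime(2024, 1, 1, 12, 0, 0, 250000)
print(within_delta(a, b, timedelta(seconds=1)))       # True
print(within_delta(a, b, timedelta(microseconds=1)))  # False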
@@ -0,0 +1,167 @@
# -*- coding: utf-8 -*-
import warnings
import json

from tarfile import TarFile
from pkgutil import get_data
from io import BytesIO

from dateutil.tz import tzfile as _tzfile

__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]

ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
METADATA_FN = 'METADATA'


class tzfile(_tzfile):
    def __reduce__(self):
        return (gettz, (self._filename,))


def getzoneinfofile_stream():
    try:
        return BytesIO(get_data(__name__, ZONEFILENAME))
    except IOError as e:  # TODO switch to FileNotFoundError?
        warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror))
        return None


class ZoneInfoFile(object):
    def __init__(self, zonefile_stream=None):
        if zonefile_stream is not None:
            with TarFile.open(fileobj=zonefile_stream) as tf:
                self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name)
                              for zf in tf.getmembers()
                              if zf.isfile() and zf.name != METADATA_FN}
                # deal with links: They'll point to their parent object. Less
                # waste of memory
                links = {zl.name: self.zones[zl.linkname]
                         for zl in tf.getmembers() if
                         zl.islnk() or zl.issym()}
                self.zones.update(links)
                try:
                    metadata_json = tf.extractfile(tf.getmember(METADATA_FN))
                    metadata_str = metadata_json.read().decode('UTF-8')
                    self.metadata = json.loads(metadata_str)
                except KeyError:
                    # no metadata in tar file
                    self.metadata = None
        else:
            self.zones = {}
            self.metadata = None

    def get(self, name, default=None):
        """
        Wrapper for :func:`ZoneInfoFile.zones.get`. This is a convenience method
        for retrieving zones from the zone dictionary.

        :param name:
            The name of the zone to retrieve. (Generally IANA zone names)

        :param default:
            The value to return in the event of a missing key.

        .. versionadded:: 2.6.0

        """
        return self.zones.get(name, default)


# The current API has gettz as a module function, although in fact it taps into
# a stateful class. So as a workaround for now, without changing the API, we
# will create a new "global" class instance the first time a user requests a
# timezone. Ugly, but adheres to the api.
#
# TODO: Remove after deprecation period.
_CLASS_ZONE_INSTANCE = []


def get_zonefile_instance(new_instance=False):
    """
    This is a convenience function which provides a :class:`ZoneInfoFile`
    instance using the data provided by the ``dateutil`` package. By default, it
    caches a single instance of the ZoneInfoFile object and returns that.

    :param new_instance:
        If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and
        used as the cached instance for the next call. Otherwise, new instances
        are created only as necessary.

    :return:
        Returns a :class:`ZoneInfoFile` object.

    .. versionadded:: 2.6
    """
    if new_instance:
        zif = None
    else:
        zif = getattr(get_zonefile_instance, '_cached_instance', None)

    if zif is None:
        zif = ZoneInfoFile(getzoneinfofile_stream())

        get_zonefile_instance._cached_instance = zif

    return zif


def gettz(name):
    """
    This retrieves a time zone from the local zoneinfo tarball that is packaged
    with dateutil.

    :param name:
        An IANA-style time zone name, as found in the zoneinfo file.

    :return:
        Returns a :class:`dateutil.tz.tzfile` time zone object.

    .. warning::
        It is generally inadvisable to use this function, and it is only
        provided for API compatibility with earlier versions. This is *not*
        equivalent to ``dateutil.tz.gettz()``, which selects an appropriate
        time zone based on the inputs, favoring system zoneinfo. This is ONLY
        for accessing the dateutil-specific zoneinfo (which may be out of
        date compared to the system zoneinfo).

    .. deprecated:: 2.6
        If you need to use a specific zoneinfofile over the system zoneinfo,
        instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call
        :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead.

        Use :func:`get_zonefile_instance` to retrieve an instance of the
        dateutil-provided zoneinfo.
    """
    warnings.warn("zoneinfo.gettz() will be removed in future versions, "
                  "to use the dateutil-provided zoneinfo files, instantiate a "
                  "ZoneInfoFile object and use ZoneInfoFile.zones.get() "
                  "instead. See the documentation for details.",
                  DeprecationWarning)

    if len(_CLASS_ZONE_INSTANCE) == 0:
        _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
    return _CLASS_ZONE_INSTANCE[0].zones.get(name)


def gettz_db_metadata():
    """ Get the zonefile metadata

    See `zonefile_metadata`_

    :returns:
        A dictionary with the database metadata

    .. deprecated:: 2.6
        See deprecation warning in :func:`zoneinfo.gettz`. To get metadata,
        query the attribute ``zoneinfo.ZoneInfoFile.metadata``.
    """
    warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future "
                  "versions, to use the dateutil-provided zoneinfo files, "
                  "ZoneInfoFile object and query the 'metadata' attribute "
                  "instead. See the documentation for details.",
                  DeprecationWarning)

    if len(_CLASS_ZONE_INSTANCE) == 0:
        _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
    return _CLASS_ZONE_INSTANCE[0].metadata
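A brief sketch of the intended (non-deprecated) entry point above, assuming the bundled dateutil-zoneinfo.tar.gz is present in the installed package:

# Sketch: fetching a zone from the dateutil-bundled tarball via the cached instance.
from dateutil.zoneinfo import get_zonefile_instance

zif = get_zonefile_instance()
denver = zif.get("America/Denver")
print(sorted(zif.zones)[:3])   # a few IANA names from the bundled database
print(denver)                  # a dateutil tzfile object, or None if the zone is missing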
Binary file not shown.
@@ -0,0 +1,75 @@
import logging
import os
import tempfile
import shutil
import json
from subprocess import check_call, check_output
from tarfile import TarFile

from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME


def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None):
    """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*

    filename is the timezone tarball from ``ftp.iana.org/tz``.

    """
    tmpdir = tempfile.mkdtemp()
    zonedir = os.path.join(tmpdir, "zoneinfo")
    moduledir = os.path.dirname(__file__)
    try:
        with TarFile.open(filename) as tf:
            for name in zonegroups:
                tf.extract(name, tmpdir)
            filepaths = [os.path.join(tmpdir, n) for n in zonegroups]

            _run_zic(zonedir, filepaths)

        # write metadata file
        with open(os.path.join(zonedir, METADATA_FN), 'w') as f:
            json.dump(metadata, f, indent=4, sort_keys=True)
        target = os.path.join(moduledir, ZONEFILENAME)
        with TarFile.open(target, "w:%s" % format) as tf:
            for entry in os.listdir(zonedir):
                entrypath = os.path.join(zonedir, entry)
                tf.add(entrypath, entry)
    finally:
        shutil.rmtree(tmpdir)


def _run_zic(zonedir, filepaths):
    """Calls the ``zic`` compiler in a compatible way to get a "fat" binary.

    Recent versions of ``zic`` default to ``-b slim``, while older versions
    don't even have the ``-b`` option (but default to "fat" binaries). The
    current version of dateutil does not support Version 2+ TZif files, which
    causes problems when used in conjunction with "slim" binaries, so this
    function is used to ensure that we always get a "fat" binary.
    """

    try:
        help_text = check_output(["zic", "--help"])
    except OSError as e:
        _print_on_nosuchfile(e)
        raise

    if b"-b " in help_text:
        bloat_args = ["-b", "fat"]
    else:
        bloat_args = []

    check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths)


def _print_on_nosuchfile(e):
    """Print helpful troubleshooting message

    e is an exception raised by subprocess.check_call()

    """
    if e.errno == 2:
        logging.error(
            "Could not find zic. Perhaps you need to install "
            "libc-bin or some other package that provides it, "
            "or it's not in your PATH?")
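Invocation is roughly as follows; the tarball name is a hypothetical local download (not part of this repo), the zic compiler must be on PATH, and the metadata dictionary shape is an assumption:

# Sketch: rebuilding the bundled zoneinfo tarball from an IANA tzdata release.
from dateutil.zoneinfo.rebuild import rebuild

rebuild("tzdata-2024a.tar.gz",                      # hypothetical local file
        zonegroups=["africa", "antarctica", "asia", "australasia",
                    "europe", "northamerica", "southamerica"],
        metadata={"tzversion": "2024a"})            # assumed metadata shape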
@@ -0,0 +1 @@
import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim();
72
tapdown/lib/python3.11/site-packages/dns/__init__.py
Normal file
72
tapdown/lib/python3.11/site-packages/dns/__init__.py
Normal file
@@ -0,0 +1,72 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license

# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""dnspython DNS toolkit"""

__all__ = [
    "asyncbackend",
    "asyncquery",
    "asyncresolver",
    "btree",
    "btreezone",
    "dnssec",
    "dnssecalgs",
    "dnssectypes",
    "e164",
    "edns",
    "entropy",
    "exception",
    "flags",
    "immutable",
    "inet",
    "ipv4",
    "ipv6",
    "message",
    "name",
    "namedict",
    "node",
    "opcode",
    "query",
    "quic",
    "rcode",
    "rdata",
    "rdataclass",
    "rdataset",
    "rdatatype",
    "renderer",
    "resolver",
    "reversename",
    "rrset",
    "serial",
    "set",
    "tokenizer",
    "transaction",
    "tsig",
    "tsigkeyring",
    "ttl",
    "rdtypes",
    "update",
    "version",
    "versioned",
    "wire",
    "xfr",
    "zone",
    "zonetypes",
    "zonefile",
]

from dns.version import version as __version__  # noqa
100
tapdown/lib/python3.11/site-packages/dns/_asyncbackend.py
Normal file
100
tapdown/lib/python3.11/site-packages/dns/_asyncbackend.py
Normal file
@@ -0,0 +1,100 @@
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license

# This is a nullcontext for both sync and async. 3.7 has a nullcontext,
# but it is only for sync use.


class NullContext:
    def __init__(self, enter_result=None):
        self.enter_result = enter_result

    def __enter__(self):
        return self.enter_result

    def __exit__(self, exc_type, exc_value, traceback):
        pass

    async def __aenter__(self):
        return self.enter_result

    async def __aexit__(self, exc_type, exc_value, traceback):
        pass


# These are declared here so backends can import them without creating
# circular dependencies with dns.asyncbackend.


class Socket:  # pragma: no cover
    def __init__(self, family: int, type: int):
        self.family = family
        self.type = type

    async def close(self):
        pass

    async def getpeername(self):
        raise NotImplementedError

    async def getsockname(self):
        raise NotImplementedError

    async def getpeercert(self, timeout):
        raise NotImplementedError

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self.close()


class DatagramSocket(Socket):  # pragma: no cover
    async def sendto(self, what, destination, timeout):
        raise NotImplementedError

    async def recvfrom(self, size, timeout):
        raise NotImplementedError


class StreamSocket(Socket):  # pragma: no cover
    async def sendall(self, what, timeout):
        raise NotImplementedError

    async def recv(self, size, timeout):
        raise NotImplementedError


class NullTransport:
    async def connect_tcp(self, host, port, timeout, local_address):
        raise NotImplementedError


class Backend:  # pragma: no cover
    def name(self) -> str:
        return "unknown"

    async def make_socket(
        self,
        af,
        socktype,
        proto=0,
        source=None,
        destination=None,
        timeout=None,
        ssl_context=None,
        server_hostname=None,
    ):
        raise NotImplementedError

    def datagram_connection_required(self):
        return False

    async def sleep(self, interval):
        raise NotImplementedError

    def get_transport_class(self):
        raise NotImplementedError

    async def wait_for(self, awaitable, timeout):
        raise NotImplementedError
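To show how the abstract contract above is meant to be filled in, a minimal hedged sketch of a backend that only implements the sleep/wait_for pieces on top of asyncio (a real backend must also provide make_socket and the transport hooks; the class name is illustrative):

# Sketch: the smallest useful Backend subclass, assuming the Backend class above.
import asyncio


class _SketchAsyncioBackend(Backend):
    def name(self) -> str:
        return "asyncio-sketch"

    async def sleep(self, interval):
        await asyncio.sleep(interval)

    async def wait_for(self, awaitable, timeout):
        return await asyncio.wait_for(awaitable, timeout)


async def _demo():
    backend = _SketchAsyncioBackend()
    await backend.sleep(0.01)
    print(backend.name())

asyncio.run(_demo())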
Some files were not shown because too many files have changed in this diff