INITIAL COMMIT

This commit is contained in:
Yandrik 2024-04-16 13:59:48 +02:00
commit 90c5a42b47
14 changed files with 736 additions and 0 deletions

8
.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

239
.idea/cody_history.xml Normal file
View File

@ -0,0 +1,239 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChatHistory">
<chats>
<list>
<chat>
<accountId value="VXNlcjozODgwNDA=" />
<internalId value="239acd85-7e30-4cf5-b2db-745696d0067f" />
<llm>
<llm>
<model value="anthropic/claude-3-sonnet-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Sonnet" />
</llm>
</llm>
</chat>
<chat>
<accountId value="VXNlcjozODgwNDA=" />
<internalId value="fcc6c031-36c7-4265-ba85-b5655c0e250d" />
<llm>
<llm>
<model value="anthropic/claude-3-opus-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Opus" />
</llm>
</llm>
<messages>
<list>
<message>
<speaker value="HUMAN" />
<text value="I need a web scraper script. It shall do a GET request to this URL: &#10;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&#10;with a cookie I provide. Then using bs4, it shall find the line: &#10;&#10;&amp;lt;tr class=&amp;quot;dbu&amp;quot;&amp;gt;&#10; &amp;lt;th&amp;gt;Max. Teilnehmer&amp;lt;/th&amp;gt;&#10; &amp;lt;td&amp;gt;10 (aktuell 10)&amp;lt;/td&amp;gt;&#10; &amp;lt;/tr&amp;gt;&#10;&#10;Where there's Max. Teilnehmer as a &amp;lt;th&amp;gt;. It shall extract the max and current participant number. If there is a place free, it shall send a POST request to the URL https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html with the necessary headers (my browser sends these: &#10;&#10;POST /CoronaNG/user/userDetails.html HTTP/1.1&#10;Host: campusonline.uni-ulm.de&#10;User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0&#10;Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8&#10;Accept-Language: en-GB,en;q=0.5&#10;Accept-Encoding: gzip, deflate, br&#10;Referer: https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&#10;Content-Type: application/x-www-form-urlencoded&#10;Content-Length: 31&#10;Origin: https://campusonline.uni-ulm.de&#10;Connection: keep-alive&#10;Cookie: JSESSIONID=0BF7A99BDE7BA6978E31F5489444372D&#10;Upgrade-Insecure-Requests: 1&#10;Sec-Fetch-Dest: document&#10;Sec-Fetch-Mode: navigate&#10;Sec-Fetch-Site: same-origin&#10;Sec-Fetch-User: ?1&#10;Pragma: no-cache&#10;Cache-Control: no-cache&#10;&#10;)&#10;&#10;and this request payload: &#10;id=18456649&amp;amp;command=participate&#10;&#10;Write this in Python&#10;" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="Here's a Python script that performs the web scraping and sends a POST request if there is a free spot:&#10;&#10;```python&#10;import requests&#10;from bs4 import BeautifulSoup&#10;&#10;# URL to scrape&#10;url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;&#10;&#10;# Cookie to include in the request&#10;cookie = &quot;JSESSIONID=0BF7A99BDE7BA6978E31F5489444372D&quot;&#10;&#10;# Headers for the GET request&#10;headers = {&#10; &quot;User-Agent&quot;: &quot;Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0&quot;,&#10; &quot;Accept&quot;: &quot;text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8&quot;,&#10; &quot;Accept-Language&quot;: &quot;en-GB,en;q=0.5&quot;,&#10; &quot;Accept-Encoding&quot;: &quot;gzip, deflate, br&quot;,&#10; &quot;Referer&quot;: &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;,&#10; &quot;Connection&quot;: &quot;keep-alive&quot;,&#10; &quot;Cookie&quot;: cookie,&#10; &quot;Upgrade-Insecure-Requests&quot;: &quot;1&quot;,&#10; &quot;Sec-Fetch-Dest&quot;: &quot;document&quot;,&#10; &quot;Sec-Fetch-Mode&quot;: &quot;navigate&quot;,&#10; &quot;Sec-Fetch-Site&quot;: &quot;same-origin&quot;,&#10; &quot;Sec-Fetch-User&quot;: &quot;?1&quot;,&#10; &quot;Pragma&quot;: &quot;no-cache&quot;,&#10; &quot;Cache-Control&quot;: &quot;no-cache&quot;&#10;}&#10;&#10;# Send GET request to the URL with the specified headers&#10;response = requests.get(url, headers=headers)&#10;&#10;# Parse the HTML content using BeautifulSoup&#10;soup = BeautifulSoup(response.content, &quot;html.parser&quot;)&#10;&#10;# Find the &lt;tr&gt; element with class &quot;dbu&quot; and &quot;Max. Teilnehmer&quot; as &lt;th&gt;&#10;tr_element = soup.find(&quot;tr&quot;, class_=&quot;dbu&quot;, string=lambda text: &quot;Max. 
Teilnehmer&quot; in text)&#10;&#10;if tr_element:&#10; # Extract the max and current participant numbers&#10; td_element = tr_element.find(&quot;td&quot;)&#10; participant_info = td_element.text.strip()&#10; max_participants, current_participants = map(int, participant_info.split(&quot;(aktuell&quot;)[0].split())&#10;&#10; # Check if there is a free spot&#10; if current_participants &lt; max_participants:&#10; # Send POST request to participate&#10; post_url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html&quot;&#10; payload = &quot;id=18456649&amp;command=participate&quot;&#10; post_headers = headers.copy()&#10; post_headers[&quot;Content-Type&quot;] = &quot;application/x-www-form-urlencoded&quot;&#10; post_headers[&quot;Content-Length&quot;] = str(len(payload))&#10; post_headers[&quot;Origin&quot;] = &quot;https://campusonline.uni-ulm.de&quot;&#10;&#10; post_response = requests.post(post_url, headers=post_headers, data=payload)&#10;&#10; if post_response.status_code == 200:&#10; print(&quot;Successfully participated!&quot;)&#10; else:&#10; print(&quot;Failed to participate. Status code:&quot;, post_response.status_code)&#10; else:&#10; print(&quot;No free spots available.&quot;)&#10;else:&#10; print(&quot;Participant information not found.&quot;)&#10;```&#10;&#10;This script does the following:&#10;&#10;1. It sends a GET request to the specified URL with the provided headers, including the cookie.&#10;&#10;2. It uses BeautifulSoup to parse the HTML content of the response.&#10;&#10;3. It searches for the `&lt;tr&gt;` element with the class &quot;dbu&quot; and containing the text &quot;Max. Teilnehmer&quot; in the `&lt;th&gt;` element.&#10;&#10;4. If the `&lt;tr&gt;` element is found, it extracts the max and current participant numbers from the `&lt;td&gt;` element.&#10;&#10;5. It checks if there is a free spot by comparing the current participants with the max participants.&#10;&#10;6" />
</message>
<message>
<speaker value="HUMAN" />
<text value="This line doesn't work. Error: &#10;Traceback (most recent call last):&#10; File &amp;quot;/home/yannik/repos/coronang-autojoin/main.py&amp;quot;, line 37, in &amp;lt;module&amp;gt;&#10; tr_element = soup.find(&amp;quot;tr&amp;quot;, class_=&amp;quot;dbu&amp;quot;, string=lambda text: &amp;quot;Max. Teilnehmer&amp;quot; in text)&#10; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^&#10; File &amp;quot;/home/yannik/.cache/pypoetry/virtualenvs/coronang-autojoin-m2p5wzdo-py3.12/lib/python3.12/site-packages/bs4/element.py&amp;quot;, line 2006, in find&#10; l = self.find_all(name, attrs, recursive, string, 1, _stacklevel=3,&#10; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^&#10; File &amp;quot;/home/yannik/.cache/pypoetry/virtualenvs/coronang-autojoin-m2p5wzdo-py3.12/lib/python3.12/site-packages/bs4/element.py&amp;quot;, line 2035, in find_all&#10; return self._find_all(name, attrs, string, limit, generator,&#10; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^&#10; File &amp;quot;/home/yannik/.cache/pypoetry/virtualenvs/coronang-autojoin-m2p5wzdo-py3.12/lib/python3.12/site-packages/bs4/element.py&amp;quot;, line 841, in _find_all&#10; found = strainer.search(i)&#10; ^^^^^^^^^^^^^^^^^^&#10; File &amp;quot;/home/yannik/.cache/pypoetry/virtualenvs/coronang-autojoin-m2p5wzdo-py3.12/lib/python3.12/site-packages/bs4/element.py&amp;quot;, line 2325, in search&#10; found = self.search_tag(markup)&#10; ^^^^^^^^^^^^^^^^^^^^^^^&#10; File &amp;quot;/home/yannik/.cache/pypoetry/virtualenvs/coronang-autojoin-m2p5wzdo-py3.12/lib/python3.12/site-packages/bs4/element.py&amp;quot;, line 2296, in search_tag&#10; if found and self.string and not self._matches(found.string, self.string):&#10; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^&#10; File &amp;quot;/home/yannik/.cache/pypoetry/virtualenvs/coronang-autojoin-m2p5wzdo-py3.12/lib/python3.12/site-packages/bs4/element.py&amp;quot;, line 2357, in _matches&#10; return 
match_against(markup)&#10; ^^^^^^^^^^^^^^^^^^^^^&#10; File &amp;quot;/home/yannik/repos/coronang-autojoin/main.py&amp;quot;, line 37, in &amp;lt;lambda&amp;gt;&#10; tr_element = soup.find(&amp;quot;tr&amp;quot;, class_=&amp;quot;dbu&amp;quot;, string=lambda text: &amp;quot;Max. Teilnehmer&amp;quot; in text)&#10; ^^^^^^^^^^^^^^^^^^^^^^^^^&#10;TypeError: argument of type 'NoneType' is not iterable" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="Apologies for the confusion. It seems like the `string` argument in the `find()` method is causing the issue. Let's modify the code to find the `&lt;tr&gt;` element with the class &quot;dbu&quot; and then check if it contains the text &quot;Max. Teilnehmer&quot; in the `&lt;th&gt;` element.&#10;&#10;Here's the updated code:&#10;&#10;```python&#10;import requests&#10;from bs4 import BeautifulSoup&#10;&#10;# URL to scrape&#10;url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;&#10;&#10;# Cookie to include in the request&#10;cookie = &quot;JSESSIONID=0BF7A99BDE7BA6978E31F5489444372D&quot;&#10;&#10;# Headers for the GET request&#10;headers = {&#10; &quot;User-Agent&quot;: &quot;Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0&quot;,&#10; &quot;Accept&quot;: &quot;text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8&quot;,&#10; &quot;Accept-Language&quot;: &quot;en-GB,en;q=0.5&quot;,&#10; &quot;Accept-Encoding&quot;: &quot;gzip, deflate, br&quot;,&#10; &quot;Referer&quot;: &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;,&#10; &quot;Connection&quot;: &quot;keep-alive&quot;,&#10; &quot;Cookie&quot;: cookie,&#10; &quot;Upgrade-Insecure-Requests&quot;: &quot;1&quot;,&#10; &quot;Sec-Fetch-Dest&quot;: &quot;document&quot;,&#10; &quot;Sec-Fetch-Mode&quot;: &quot;navigate&quot;,&#10; &quot;Sec-Fetch-Site&quot;: &quot;same-origin&quot;,&#10; &quot;Sec-Fetch-User&quot;: &quot;?1&quot;,&#10; &quot;Pragma&quot;: &quot;no-cache&quot;,&#10; &quot;Cache-Control&quot;: &quot;no-cache&quot;&#10;}&#10;&#10;# Send GET request to the URL with the specified headers&#10;response = requests.get(url, headers=headers)&#10;&#10;# Parse the HTML content using BeautifulSoup&#10;soup = BeautifulSoup(response.content, &quot;html.parser&quot;)&#10;&#10;# Find the &lt;tr&gt; element with class &quot;dbu&quot;&#10;tr_elements = 
soup.find_all(&quot;tr&quot;, class_=&quot;dbu&quot;)&#10;&#10;for tr_element in tr_elements:&#10; th_element = tr_element.find(&quot;th&quot;)&#10; if th_element and &quot;Max. Teilnehmer&quot; in th_element.text:&#10; # Extract the max and current participant numbers&#10; td_element = tr_element.find(&quot;td&quot;)&#10; participant_info = td_element.text.strip()&#10; max_participants, current_participants = map(int, participant_info.split(&quot;(aktuell&quot;)[0].split())&#10;&#10; # Check if there is a free spot&#10; if current_participants &lt; max_participants:&#10; # Send POST request to participate&#10; post_url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html&quot;&#10; payload = &quot;id=18456649&amp;command=participate&quot;&#10; post_headers = headers.copy()&#10; post_headers[&quot;Content-Type&quot;] = &quot;application/x-www-form-urlencoded&quot;&#10; post_headers[&quot;Content-Length&quot;] = str(len(payload))&#10; post_headers[&quot;Origin&quot;] = &quot;https://campusonline.uni-ulm.de&quot;&#10;&#10; post_response = requests.post(post_url, headers=post_headers, data=payload)&#10;&#10; if post_response.status_code == 200:&#10; print(&quot;Successfully participated!&quot;)&#10; else:&#10; print(&quot;Failed to participate. Status code:&quot;, post_response.status_code)&#10; else:&#10; print(&quot;No free spots available.&quot;)&#10; break&#10;else:&#10; print(&quot;Participant information not found.&quot;)&#10;```&#10;&#10;In this updated code:&#10;&#10;1. We use `find_all()` to find all the `&lt;tr&gt;` elements with the class &quot;dbu&quot;.&#10;&#10;2. We iterate over the found `&lt;tr&gt;` elements and check if each one contains a `&lt;th&gt;` element with the text &quot;Max. Teilneh" />
</message>
</list>
</messages>
<updatedAt value="2024-04-15T21:37:29.294607723" />
</chat>
<chat>
<accountId value="VXNlcjozODgwNDA=" />
<internalId value="c28f3cfa-1d3c-478c-8913-d330fb780a21" />
<llm>
<llm>
<model value="anthropic/claude-3-opus-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Opus" />
</llm>
</llm>
<messages>
<list>
<message>
<speaker value="HUMAN" />
<text value="Add error handling to this script @.../repos/coronang-autojoin/main.py &#10;Add proper error try-catch with nice error messages and continues. Also, retry 3 times if the POST request fails" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="Here's the updated script with error handling and retry logic for the POST request:&#10;&#10;```python&#10;import time&#10;import requests&#10;from bs4 import BeautifulSoup&#10;import re&#10;&#10;# URL to scrape&#10;url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;&#10;&#10;# Cookie to include in the request&#10;cookie = &quot;JSESSIONID=0BF7A99BDE7BA6978E31F5489444372D&quot;&#10;&#10;# Headers for the GET request&#10;headers = {&#10; &quot;User-Agent&quot;: &quot;Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0&quot;,&#10; &quot;Accept&quot;: &quot;text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8&quot;,&#10; &quot;Accept-Language&quot;: &quot;en-GB,en;q=0.5&quot;,&#10; &quot;Accept-Encoding&quot;: &quot;gzip, deflate, br&quot;,&#10; &quot;Referer&quot;: &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;,&#10; &quot;Connection&quot;: &quot;keep-alive&quot;,&#10; &quot;Cookie&quot;: cookie,&#10; &quot;Upgrade-Insecure-Requests&quot;: &quot;1&quot;,&#10; &quot;Sec-Fetch-Dest&quot;: &quot;document&quot;,&#10; &quot;Sec-Fetch-Mode&quot;: &quot;navigate&quot;,&#10; &quot;Sec-Fetch-Site&quot;: &quot;same-origin&quot;,&#10; &quot;Sec-Fetch-User&quot;: &quot;?1&quot;,&#10; &quot;Pragma&quot;: &quot;no-cache&quot;,&#10; &quot;Cache-Control&quot;: &quot;no-cache&quot;&#10;}&#10;&#10;while True:&#10; try:&#10; print(&quot;Scraping...&quot;)&#10; # Send GET request to the URL with the specified headers&#10; response = requests.get(url, headers=headers)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10;&#10; # Parse the HTML content using BeautifulSoup&#10; soup = BeautifulSoup(response.content, &quot;html.parser&quot;)&#10;&#10; # Find the &lt;tr&gt; element with class &quot;dbu&quot;&#10; tr_elements = soup.find_all(&quot;tr&quot;, class_=&quot;dbu&quot;)&#10;&#10; for tr_element in 
tr_elements:&#10; th_element = tr_element.find(&quot;th&quot;)&#10; if th_element and &quot;Max. Teilnehmer&quot; in th_element.text:&#10; # Extract the max and current participant numbers&#10; td_element = tr_element.find(&quot;td&quot;)&#10; participant_info = td_element.text.strip()&#10;&#10; # regex to find the numbers in a string like &quot;10 (aktuell 10)&quot;&#10; regex = r&quot;(\d+) \(aktuell (\d+)\)&quot;&#10; match = re.search(regex, participant_info)&#10; if match:&#10; max_participants = int(match.group(1))&#10; current_participants = int(match.group(2))&#10; print(&quot;Max participants:&quot;, max_participants, &quot;; Current participants:&quot;, current_participants)&#10; else:&#10; print(&quot;Failed to parse participant info:&quot;, participant_info)&#10; continue&#10;&#10; # Check if there is a free spot&#10; if current_participants &lt; max_participants:&#10; # Send POST request to participate&#10; post_url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html&quot;&#10; payload = &quot;id=18456649&amp;command=participate&quot;&#10; post_headers = headers.copy()&#10; post_headers[&quot;Content-Type&quot;] = &quot;application/x-www-form-urlencoded&quot;&#10; post_headers[&quot;Content-Length&quot;] = str(len(payload))&#10; post_headers[&quot;Origin&quot;] = &quot;https://campusonline.uni-ulm.de&quot;&#10;&#10; retry_count = 0&#10; while retry_count &lt; 3:&#10; try:&#10; post_response = requests.post(post_url, headers=post_headers, data=payload)&#10; post_response.raise_for_status() #&#10;```" />
</message>
</list>
</messages>
<updatedAt value="2024-04-16T09:30:58.752365538" />
</chat>
<chat>
<accountId value="VXNlcjozODgwNDA=" />
<internalId value="386a46a6-cf5e-45ab-9820-5330ac3aa541" />
<llm>
<llm>
<model value="anthropic/claude-3-opus-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Opus" />
</llm>
</llm>
<messages>
<list>
<message>
<speaker value="HUMAN" />
<text value="Add error handling to this script @.../repos/coronang-autojoin/main.py Add proper error try-catch with nice error messages and continues. Also, retry 3 times if the POST request fails" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="Here's the updated script with error handling and retry functionality:&#10;&#10;```python&#10;import requests&#10;import time&#10;import sys&#10;&#10;def make_request(url, data, headers, max_retries=3):&#10; retries = 0&#10; while retries &lt; max_retries:&#10; try:&#10; response = requests.post(url, data=data, headers=headers)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10; return response&#10; except requests.exceptions.RequestException as e:&#10; print(f&quot;Error occurred: {str(e)}&quot;)&#10; retries += 1&#10; if retries &lt; max_retries:&#10; print(f&quot;Retrying in 5 seconds... (Attempt {retries+1}/{max_retries})&quot;)&#10; time.sleep(5)&#10; else:&#10; print(&quot;Max retries reached. Exiting.&quot;)&#10; sys.exit(1)&#10;&#10;def main():&#10; url = &quot;https://www.coronang.com/api/v1/sessions&quot;&#10; headers = {&#10; &quot;User-Agent&quot;: &quot;Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36&quot;,&#10; &quot;Content-Type&quot;: &quot;application/json&quot;,&#10; &quot;Accept&quot;: &quot;application/json, text/plain, */*&quot;,&#10; &quot;Accept-Language&quot;: &quot;en-US,en;q=0.5&quot;,&#10; &quot;Accept-Encoding&quot;: &quot;gzip, deflate, br&quot;,&#10; &quot;Origin&quot;: &quot;https://www.coronang.com&quot;,&#10; &quot;DNT&quot;: &quot;1&quot;,&#10; &quot;Connection&quot;: &quot;keep-alive&quot;,&#10; &quot;Referer&quot;: &quot;https://www.coronang.com/&quot;,&#10; &quot;Sec-Fetch-Dest&quot;: &quot;empty&quot;,&#10; &quot;Sec-Fetch-Mode&quot;: &quot;cors&quot;,&#10; &quot;Sec-Fetch-Site&quot;: &quot;same-origin&quot;,&#10; &quot;Pragma&quot;: &quot;no-cache&quot;,&#10; &quot;Cache-Control&quot;: &quot;no-cache&quot;,&#10; &quot;TE&quot;: &quot;trailers&quot;&#10; }&#10;&#10; while True:&#10; try:&#10; data = '{&quot;operationName&quot;:&quot;CreateSession&quot;,&quot;variables&quot;:{},&quot;query&quot;:&quot;mutation 
CreateSession {\\n createSession {\\n id\\n __typename\\n }\\n}\\n&quot;}'&#10; response = make_request(url, data, headers)&#10; print(response.text)&#10;&#10; time.sleep(1) # Wait for 1 second before the next request&#10; except KeyboardInterrupt:&#10; print(&quot;Script interrupted by user. Exiting.&quot;)&#10; sys.exit(0)&#10;&#10;if __name__ == &quot;__main__&quot;:&#10; main()&#10;```&#10;&#10;In this updated version of the script, I made the following changes:&#10;&#10;1. Added a `make_request` function that handles the POST request and retries if it fails. It takes the URL, data, headers, and an optional `max_retries` parameter (default is 3).&#10;&#10;2. Inside the `make_request` function, a `while` loop is used to retry the request if it fails. It catches any `requests.exceptions.RequestException` and prints an error message. If the maximum number of retries is not reached, it waits for 5 seconds before retrying. If the maximum retries are reached, it exits the script with a status code of 1.&#10;&#10;3. In the `main` function, the `make_request` function is called with the URL, data, and headers. If the request is successful, it prints the response text.&#10;&#10;4. Added a `try-except` block in the `main` function to catch `KeyboardInterrupt` exceptions. If the user interrupts the script (e.g., by pressing Ctrl+C), it prints a message and exits the script with a status code of 0.&#10;&#10;5. Added a `time.sleep(1)` statement to introduce a 1-second delay between each request to avoid overwhelming the server.&#10;&#10;With these modifications, the script now includes proper error handling, retries the request up to 3 times if it fails, and provides informative error messages. It also gracefully handles user interruptions and adds a small delay between requests." />
</message>
<message>
<speaker value="HUMAN" />
<text value="No. Output the ENTIRE modified version of the @.../repos/coronang-autojoin/main.py script" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="Apologies for the confusion. Here's the entire modified version of the `main.py` script:&#10;&#10;```python&#10;import time&#10;import requests&#10;from bs4 import BeautifulSoup&#10;import re&#10;&#10;# URL to scrape&#10;url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;&#10;&#10;# Cookie to include in the request&#10;cookie = &quot;JSESSIONID=0BF7A99BDE7BA6978E31F5489444372D&quot;&#10;&#10;# Headers for the GET request&#10;headers = {&#10; &quot;User-Agent&quot;: &quot;Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0&quot;,&#10; &quot;Accept&quot;: &quot;text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8&quot;,&#10; &quot;Accept-Language&quot;: &quot;en-GB,en;q=0.5&quot;,&#10; &quot;Accept-Encoding&quot;: &quot;gzip, deflate, br&quot;,&#10; &quot;Referer&quot;: &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614&quot;,&#10; &quot;Connection&quot;: &quot;keep-alive&quot;,&#10; &quot;Cookie&quot;: cookie,&#10; &quot;Upgrade-Insecure-Requests&quot;: &quot;1&quot;,&#10; &quot;Sec-Fetch-Dest&quot;: &quot;document&quot;,&#10; &quot;Sec-Fetch-Mode&quot;: &quot;navigate&quot;,&#10; &quot;Sec-Fetch-Site&quot;: &quot;same-origin&quot;,&#10; &quot;Sec-Fetch-User&quot;: &quot;?1&quot;,&#10; &quot;Pragma&quot;: &quot;no-cache&quot;,&#10; &quot;Cache-Control&quot;: &quot;no-cache&quot;&#10;}&#10;&#10;def make_post_request(url, headers, payload, max_retries=3):&#10; retries = 0&#10; while retries &lt; max_retries:&#10; try:&#10; response = requests.post(url, headers=headers, data=payload)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10; return response&#10; except requests.exceptions.RequestException as e:&#10; print(f&quot;Error occurred during POST request: {str(e)}&quot;)&#10; retries += 1&#10; if retries &lt; max_retries:&#10; print(f&quot;Retrying in 5 seconds... 
(Attempt {retries+1}/{max_retries})&quot;)&#10; time.sleep(5)&#10; else:&#10; print(&quot;Max retries reached. Exiting.&quot;)&#10; exit(1)&#10;&#10;while True:&#10; try:&#10; print(&quot;Scraping...&quot;)&#10; # Send GET request to the URL with the specified headers&#10; response = requests.get(url, headers=headers)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10;&#10; # Parse the HTML content using BeautifulSoup&#10; soup = BeautifulSoup(response.content, &quot;html.parser&quot;)&#10;&#10; # Find the &lt;tr&gt; element with class &quot;dbu&quot;&#10; tr_elements = soup.find_all(&quot;tr&quot;, class_=&quot;dbu&quot;)&#10;&#10; for tr_element in tr_elements:&#10; th_element = tr_element.find(&quot;th&quot;)&#10; if th_element and &quot;Max. Teilnehmer&quot; in th_element.text:&#10; # Extract the max and current participant numbers&#10; td_element = tr_element.find(&quot;td&quot;)&#10; participant_info = td_element.text.strip()&#10;&#10; # regex to find the numbers in a string like &quot;10 (aktuell 10)&quot;&#10; regex = r&quot;(\d+) \(aktuell (\d+)\)&quot;&#10; match = re.search(regex, participant_info)&#10; if match:&#10; max_participants = int(match.group(1))&#10; current_participants = int(match.group(2))&#10; print(&quot;Max participants:&quot;, max_participants, &quot;; Current participants:&quot;, current_participants)&#10; else:&#10; print(&quot;Failed to parse participant info:&quot;, participant_info)&#10; continue&#10;&#10; # Check if there is a free spot&#10; if current_participants &lt; max_&#10;```" />
</message>
<message>
<speaker value="HUMAN" />
<text value="continue" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="```python&#10; if current_participants &lt; max_participants:&#10; # Send POST request to participate&#10; post_url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html&quot;&#10; payload = &quot;id=18456649&amp;command=participate&quot;&#10; post_headers = headers.copy()&#10; post_headers[&quot;Content-Type&quot;] = &quot;application/x-www-form-urlencoded&quot;&#10; post_headers[&quot;Content-Length&quot;] = str(len(payload))&#10; post_headers[&quot;Origin&quot;] = &quot;https://campusonline.uni-ulm.de&quot;&#10;&#10; post_response = make_post_request(post_url, post_headers, payload)&#10;&#10; if post_response.status_code == 200:&#10; print(&quot;Successfully participated!&quot;)&#10; exit(0)&#10; else:&#10; print(&quot;Failed to participate. Status code:&quot;, post_response.status_code)&#10; exit(1)&#10; else:&#10; print(&quot;No free spots available.&quot;)&#10; break&#10; else:&#10; print(&quot;Participant information not found.&quot;)&#10;&#10; except requests.exceptions.RequestException as e:&#10; print(f&quot;Error occurred during GET request: {str(e)}&quot;)&#10;&#10; print(f'Current Time: {time.strftime(&quot;%H:%M:%S&quot;)}. Sleeping for 30 seconds...')&#10; time.sleep(30)&#10;```&#10;&#10;In this modified version of the `main.py` script, I made the following changes:&#10;&#10;1. Added a `make_post_request` function that handles the POST request and retries if it fails. It takes the URL, headers, payload, and an optional `max_retries` parameter (default is 3).&#10;&#10;2. Inside the `make_post_request` function, a `while` loop is used to retry the request if it fails. It catches any `requests.exceptions.RequestException` and prints an error message. If the maximum number of retries is not reached, it waits for 5 seconds before retrying. If the maximum retries are reached, it exits the script with a status code of 1.&#10;&#10;3. 
Wrapped the main logic inside a `try-except` block to catch any `requests.exceptions.RequestException` that may occur during the GET request. If an exception is caught, it prints an error message.&#10;&#10;4. Modified the code to use the `make_post_request` function when sending the POST request to participate.&#10;&#10;5. Added `response.raise_for_status()` after the GET request to raise an exception for 4xx or 5xx status codes.&#10;&#10;With these modifications, the script now includes proper error handling for both the GET and POST requests. It retries the POST request up to 3 times if it fails and provides informative error messages. The script also handles exceptions that may occur during the GET request and continues execution." />
</message>
</list>
</messages>
<updatedAt value="2024-04-16T09:34:18.878815252" />
</chat>
<chat>
<accountId value="VXNlcjozODgwNDA=" />
<enhancedContext>
<enhancedContext>
<isEnabled value="false" />
</enhancedContext>
</enhancedContext>
<internalId value="7cd3ee5d-0367-49d2-aa54-59d2ae91f4f1" />
<llm>
<llm>
<model value="anthropic/claude-3-opus-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Opus" />
</llm>
</llm>
<messages>
<list>
<message>
<speaker value="HUMAN" />
<text value="# Survey Tool&#10;&#10;In this project, an online platform for digital surveys is to be developed. Users of the platform can create surveys using an editor and then publish them. The editor should provide frequently used form types as templates. The published surveys can then be accessed and answered via a link in the web browser. &#10;&#10;## Specs&#10;Specifications can be found [here](./lars/README.md)&#10;&#10;## Team&#10;&#10;| Name | Role | Email |&#10;| --- | --- | --- |&#10;| Lars Thomae | Lead Phase 1/Platform Specialist+Backend | lars.thomae@uni-ulm.de |&#10;| Nikolas Rank | Developer/Backend+Platform | nikolas.rank@uni-ulm.de |&#10;| Levent Yildiz | Developer/Frontend+Backend | levent.yildiz@uni-ulm.de |&#10;| Yannik Bretschneider | Developer/Frontend+Svelte Specialist | yannik.bretschneider@uni-ulm.de |&#10;&#10;## Getting Help&#10;&#10;If you need assistance, have questions, or want to reach out to the team, please contact us via the email addresses listed above to the team member responseible for communication. &#10;&#10;## Reporting Issues &amp;amp; Requesting Changes&#10;&#10;To report bugs, request new features, or suggest changes to requirements, please follow this process:&#10;&#10;1. Check the existing [GitLab Issues](https://gitlab.com/spmsec/uulm-se-project-ss24/prototyping/-/issues) to see if your issue or request has already been submitted. &#10;&#10;2. If not, create a new issue by clicking the &amp;quot;New Issue&amp;quot; button.&#10;&#10;3. Choose the appropriate issue template (in progress):&#10; - Bug Report: For reporting bugs or issues with functionality&#10; - Feature Request: For proposing new features or enhancements&#10; - Change Request: For requesting changes to existing requirements or specifications&#10;&#10;4. Fill out the selected template completely, providing as much detail as possible.&#10;&#10;5. Submit the issue and the team will triage and prioritize it appropriately. 
You can track and discuss progress on the issue.&#10;&#10;&#10;## prototyping&#10;&#10;This repository will contain different prototyping resources for the upcoming software engineering project (summer term 24).&#10;&#10;### Folder structure&#10;&#10;The different named folders belong to each participant and should serve as a baseline container to store project notes, documentation etc.&#10;The proposed structure for `product/` should only serve for illustration purposes, this is not fixed. &#10; &#10;```bash&#10;product/&#10; subsystems/&#10; client/&#10; technologies.md&#10; domain-model.md&#10; raw-architecture.md&#10; api/&#10; technologies.md&#10; domain-model.md&#10; raw-architecture.md&#10; platform/&#10; technologies.md&#10; domain-model.md&#10; automation-concept.md&#10; delivery.md&#10; release-planning.md&#10; README.md&#10; features.md&#10; roadmap.md&#10; development-process.md&#10; technologies.md&#10;----- PROTOTYPING AREA -----&#10;code/&#10; terraform/&#10; ansible/&#10; scripts/&#10; python/&#10; shell/&#10; README.md&#10;----- PARTICIPANT AREA ----- &#10;lars/&#10; PRODUCT-README.md&#10;levent/&#10; PRODUCT-README.md&#10;nikolas/&#10; PRODUCT-README.md&#10;yannik/&#10; PRODUCT-README.md&#10;```&#10;&#10;&#10;## License&#10;&#10;TODO&#10;&#10;&#10;&#10;&#10;&#10;Rewrite this with the following extra info and sections: &#10;&#10;- Process for Merge Requests: Create branches automatically. Mention issue number (e.g. #6) in commits to get links to the issue on GitLab&#10;- According to the Client: Think about the review process and refine it in a way that would leave space to include such feedback and desires from @SvenPatrick, do this too" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="# Survey Tool&#10;&#10;In this project, an online platform for digital surveys is to be developed. Users of the platform can create surveys using an editor and then publish them. The editor should provide frequently used form types as templates. The published surveys can then be accessed and answered via a link in the web browser. &#10;&#10;## Specs&#10;Specifications can be found [here](./lars/README.md)&#10;&#10;## Team&#10;&#10;| Name | Role | Email |&#10;| --- | --- | --- |&#10;| Lars Thomae | Lead Phase 1/Platform Specialist+Backend | lars.thomae@uni-ulm.de |&#10;| Nikolas Rank | Developer/Backend+Platform | nikolas.rank@uni-ulm.de |&#10;| Levent Yildiz | Developer/Frontend+Backend | levent.yildiz@uni-ulm.de |&#10;| Yannik Bretschneider | Developer/Frontend+Svelte Specialist | yannik.bretschneider@uni-ulm.de |&#10;&#10;## Getting Help&#10;&#10;If you need assistance, have questions, or want to reach out to the team, please contact us via the email addresses listed above to the team member responsible for communication. &#10;&#10;## Reporting Issues &amp;amp; Requesting Changes&#10;&#10;To report bugs, request new features, or suggest changes to requirements, please follow this process:&#10;&#10;1. Check the existing [GitLab Issues](https://gitlab.com/spmsec/uulm-se-project-ss24/prototyping/-/issues) to see if your issue or request has already been submitted. &#10;&#10;2. If not, create a new issue by clicking the &amp;quot;New Issue&amp;quot; button.&#10;&#10;3. Choose the" />
</message>
</list>
</messages>
<updatedAt value="2024-04-16T10:40:48.247587726" />
</chat>
<chat>
<accountId value="VXNlcjozODgwNDA=" />
<enhancedContext>
<enhancedContext>
<isEnabled value="false" />
</enhancedContext>
</enhancedContext>
<internalId value="5a09d07c-d9a1-4d1d-94b8-92f88e281de0" />
<llm>
<llm>
<model value="anthropic/claude-3-opus-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Opus" />
</llm>
</llm>
<messages>
<list>
<message>
<speaker value="HUMAN" />
<text value="# Survey Tool&#10;&#10;In this project, an online platform for digital surveys is to be developed. Users of the platform can create surveys using an editor and then publish them. The editor should provide frequently used form types as templates. The published surveys can then be accessed and answered via a link in the web browser. &#10;&#10;## Specs&#10;Specifications can be found [here](./lars/README.md)&#10;&#10;## Team&#10;&#10;| Name | Role | Email |&#10;| --- | --- | --- |&#10;| Lars Thomae | Lead Phase 1/Platform Specialist+Backend | lars.thomae@uni-ulm.de |&#10;| Nikolas Rank | Developer/Backend+Platform | nikolas.rank@uni-ulm.de |&#10;| Levent Yildiz | Developer/Frontend+Backend | levent.yildiz@uni-ulm.de |&#10;| Yannik Bretschneider | Developer/Frontend+Svelte Specialist | yannik.bretschneider@uni-ulm.de |&#10;&#10;## Getting Help&#10;&#10;If you need assistance, have questions, or want to reach out to the team, please contact us via the email addresses listed above to the team member responseible for communication. &#10;&#10;## Reporting Issues &amp;amp; Requesting Changes&#10;&#10;To report bugs, request new features, or suggest changes to requirements, please follow this process:&#10;&#10;1. Check the existing [GitLab Issues](https://gitlab.com/spmsec/uulm-se-project-ss24/prototyping/-/issues) to see if your issue or request has already been submitted. &#10;&#10;2. If not, create a new issue by clicking the &amp;quot;New Issue&amp;quot; button.&#10;&#10;3. Choose the appropriate issue template (in progress):&#10; - Bug Report: For reporting bugs or issues with functionality&#10; - Feature Request: For proposing new features or enhancements&#10; - Change Request: For requesting changes to existing requirements or specifications&#10;&#10;4. Fill out the selected template completely, providing as much detail as possible.&#10;&#10;5. Submit the issue and the team will triage and prioritize it appropriately. 
You can track and discuss progress on the issue.&#10;&#10;&#10;## prototyping&#10;&#10;This repository will contain different prototyping resources for the upcoming software engineering project (summer term 24).&#10;&#10;### Folder structure&#10;&#10;The different named folders belong to each participant and should serve as a baseline container to store project notes, documentation etc.&#10;The proposed structure for `product/` should only serve for illustration purposes, this is not fixed. &#10; &#10;```bash&#10;product/&#10; subsystems/&#10; client/&#10; technologies.md&#10; domain-model.md&#10; raw-architecture.md&#10; api/&#10; technologies.md&#10; domain-model.md&#10; raw-architecture.md&#10; platform/&#10; technologies.md&#10; domain-model.md&#10; automation-concept.md&#10; delivery.md&#10; release-planning.md&#10; README.md&#10; features.md&#10; roadmap.md&#10; development-process.md&#10; technologies.md&#10;----- PROTOTYPING AREA -----&#10;code/&#10; terraform/&#10; ansible/&#10; scripts/&#10; python/&#10; shell/&#10; README.md&#10;----- PARTICIPANT AREA ----- &#10;lars/&#10; PRODUCT-README.md&#10;levent/&#10; PRODUCT-README.md&#10;nikolas/&#10; PRODUCT-README.md&#10;yannik/&#10; PRODUCT-README.md&#10;```&#10;&#10;&#10;## License&#10;&#10;TODO&#10;&#10;&#10;&#10;&#10;&#10;Rewrite this with the following extra info and sections:&#10;Process for Merge Requests: Create branches automatically. Mention issue number (e.g. #6) in commits to get links to the issue on GitLab&#10;According to the Client: Think about the review process and refine it in a way that would leave space to include such feedback and desires from @SvenPatrick, do this too. Put the response into a Mardkdown code block" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="```markdown&#10;# Survey Tool&#10;&#10;In this project, an online platform for digital surveys is to be developed. Users of the platform can create surveys using an editor and then publish them. The editor should provide frequently used form types as templates. The published surveys can then be accessed and answered via a link in the web browser. &#10;&#10;## Specs&#10;Specifications can be found [here](./lars/README.md)&#10;&#10;## Team&#10;&#10;| Name | Role | Email |&#10;| --- | --- | --- |&#10;| Lars Thomae | Lead Phase 1/Platform Specialist+Backend | lars.thomae@uni-ulm.de |&#10;| Nikolas Rank | Developer/Backend+Platform | nikolas.rank@uni-ulm.de |&#10;| Levent Yildiz | Developer/Frontend+Backend | levent.yildiz@uni-ulm.de |&#10;| Yannik Bretschneider | Developer/Frontend+Svelte Specialist | yannik.bretschneider@uni-ulm.de |&#10;&#10;## Getting Help&#10;&#10;If you need assistance, have questions, or want to reach out to the team, please contact us via the email addresses listed above to the team member responsible for communication. &#10;&#10;## Reporting Issues &amp;amp; Requesting Changes&#10;&#10;To report bugs, request new features, or suggest changes to requirements, please follow this process:&#10;&#10;1. Check the existing [GitLab Issues](https://gitlab.com/spmsec/uulm-se-project-ss24/prototyping/-/issues) to see if your issue or request has already been submitted. &#10;&#10;2. If not, create a new issue by clicking the &amp;quot;New Issue&amp;quot; button.&#10;&#10;3. Choose the appropriate issue template (in progress):&#10; - Bug Report: For reporting bugs or issues with functionality&#10; - Feature Request: For proposing new features or enhancements&#10; - Change Request: For requesting changes to existing requirements or specifications&#10;&#10;4. Fill out the selected template completely, providing as much detail as possible.&#10;&#10;5. Submit the issue and the team will triage and prioritize it appropriately. 
You can track and discuss progress on the issue.&#10;&#10;## Process for Merge Requests&#10;&#10;1. Create a new branch for your changes. Branch names should follow the convention `issue-&lt;issue-number&gt;-&lt;short-description&gt;`, e.g., `issue-6-add-login-form`.&#10;&#10;2. Make your changes in the new branch, committing them with descriptive commit messages. Include the relevant issue number in your commit messages using the format `#&lt;issue-number&gt;`, e.g., `Add login form (#6)`. This will automatically link the commit to the corresponding issue on GitLab.&#10;&#10;3. When your changes are ready, create a new Merge Request (MR) from your branch to the main branch.&#10;&#10;4. Fill out the MR template, providing a clear description of the changes, any relevant information for reviewers, and referencing the associated issue(s) using the `#&lt;issue-number&gt;` format.&#10;&#10;5. Assign the appropriate reviewers to the MR. The reviewers will provide feedback, request changes if necessary, and ultimately approve the MR for merging.&#10;&#10;6. Once the MR has been approved, it can be merged into the main branch.&#10;&#10;## According to the Client&#10;&#10;As the client, @SvenPatrick, I would like to provide some input on the review process for merge requests:&#10;&#10;1. Ensure that at least two team members review each merge request before it is merged. This will help catch any potential issues and ensure code quality.&#10;&#10;2. Encourage reviewers to provide constructive feedback and suggestions for improvement. This will foster a collaborative environment and help the team learn from each other.&#10;&#10;3. If significant changes are requested during the review process, the author of the merge request should address the feedback and update the branch accordingly. The reviewers should then re-review the changes to ensure they meet the required standards.&#10;&#10;4. 
Consider implementing a code style guide or linting rules to maintain consistency across the codebase. This will make the review process smoother and help avoid nitpicking over minor style issues.&#10;&#10;5. If there are any disagreements or conflicts during the review process, encourage open communication and try to reach a consensus. If necessary, involve the project lead or a designated mediator to help resolve the issue.&#10;&#10;By incorporating these suggestions into the review process, we can ensure that the project maintains a high level of quality while also fostering a positive and collaborative team dynamic.&#10;&#10;## prototyping&#10;&#10;This repository will contain different prototyping&#10;```" />
</message>
</list>
</messages>
<updatedAt value="2024-04-16T10:41:40.548891115" />
</chat>
<chat>
<accountId value="VXNlcjozODgwNDA=" />
<enhancedContext>
<enhancedContext>
<isEnabled value="false" />
</enhancedContext>
</enhancedContext>
<internalId value="73db802b-db9e-47b4-8673-45a5ed7e38d5" />
<llm>
<llm>
<model value="anthropic/claude-3-opus-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Opus" />
</llm>
</llm>
<messages>
<list>
<message>
<speaker value="HUMAN" />
<text value="parametrize this script&#10;@.../repos/coronang-autojoin/main.py &#10;Add the following: &#10;- argparse inputs for cvid and jsessionid cookie" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="Here's the updated script with argparse inputs for cvid and jsessionid cookie:&#10;&#10;```python&#10;import time&#10;import requests&#10;from bs4 import BeautifulSoup&#10;import re&#10;import argparse&#10;&#10;# Create an ArgumentParser object&#10;parser = argparse.ArgumentParser(description=&quot;CoronaNG Autojoin Script&quot;)&#10;&#10;# Add arguments for cvid and jsessionid&#10;parser.add_argument(&quot;--cvid&quot;, type=int, required=True, help=&quot;The cvid value&quot;)&#10;parser.add_argument(&quot;--jsessionid&quot;, type=str, required=True, help=&quot;The JSESSIONID cookie value&quot;)&#10;&#10;# Parse the command-line arguments&#10;args = parser.parse_args()&#10;&#10;# URL to scrape&#10;url = f&quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid={args.cvid}&quot;&#10;&#10;# Cookie to include in the request&#10;cookie = f&quot;JSESSIONID={args.jsessionid}&quot;&#10;&#10;# Headers for the GET request&#10;headers = {&#10; &quot;User-Agent&quot;: &quot;Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0&quot;,&#10; &quot;Accept&quot;: &quot;text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8&quot;,&#10; &quot;Accept-Language&quot;: &quot;en-GB,en;q=0.5&quot;,&#10; &quot;Accept-Encoding&quot;: &quot;gzip, deflate, br&quot;,&#10; &quot;Referer&quot;: url,&#10; &quot;Connection&quot;: &quot;keep-alive&quot;,&#10; &quot;Cookie&quot;: cookie,&#10; &quot;Upgrade-Insecure-Requests&quot;: &quot;1&quot;,&#10; &quot;Sec-Fetch-Dest&quot;: &quot;document&quot;,&#10; &quot;Sec-Fetch-Mode&quot;: &quot;navigate&quot;,&#10; &quot;Sec-Fetch-Site&quot;: &quot;same-origin&quot;,&#10; &quot;Sec-Fetch-User&quot;: &quot;?1&quot;,&#10; &quot;Pragma&quot;: &quot;no-cache&quot;,&#10; &quot;Cache-Control&quot;: &quot;no-cache&quot;&#10;}&#10;&#10;def make_post_request(url, headers, payload, max_retries=3):&#10; retries = 0&#10; while retries &lt; max_retries:&#10; try:&#10; response = 
requests.post(url, headers=headers, data=payload)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10; return response&#10; except requests.exceptions.RequestException as e:&#10; print(f&quot;Error occurred during POST request: {str(e)}&quot;)&#10; retries += 1&#10; if retries &lt; max_retries:&#10; print(f&quot;Retrying in 5 seconds... (Attempt {retries+1}/{max_retries})&quot;)&#10; time.sleep(5)&#10; else:&#10; print(&quot;Max retries reached. Exiting.&quot;)&#10; exit(1)&#10;&#10;while True:&#10; try:&#10; print(&quot;Scraping...&quot;)&#10; # Send GET request to the URL with the specified headers&#10; response = requests.get(url, headers=headers)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10;&#10; # Parse the HTML content using BeautifulSoup&#10; soup = BeautifulSoup(response.content, &quot;html.parser&quot;)&#10;&#10; # Find the &lt;tr&gt; element with class &quot;dbu&quot;&#10; tr_elements = soup.find_all(&quot;tr&quot;, class_=&quot;dbu&quot;)&#10;&#10; for tr_element in tr_elements:&#10; th_element = tr_element.find(&quot;th&quot;)&#10; if th_element and &quot;Max. Teilnehmer&quot; in th_element.text:&#10; # Extract the max and current participant numbers&#10; td_element = tr_element.find(&quot;td&quot;)&#10; participant_info = td_element.text.strip()&#10;&#10; # regex to find the numbers in a string like &quot;10 (aktuell 10)&quot;&#10; regex = r&quot;(\d+) \(aktuell (\d+)\)&quot;&#10; match = re.search(regex, participant_info)&#10; if match:&#10; max_participants = int(match.group(1))&#10; current_participants = int(match&#10;```" />
</message>
<message>
<speaker value="HUMAN" />
<text value="continue" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="```python&#10; print(&quot;Max participants:&quot;, max_participants, &quot;; Current participants:&quot;, current_participants)&#10; else:&#10; print(&quot;Failed to parse participant info:&quot;, participant_info)&#10; continue&#10;&#10; # Check if there is a free spot&#10; if current_participants &lt; max_participants:&#10; # Send POST request to participate&#10; post_url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html&quot;&#10; payload = f&quot;id={args.cvid}&amp;command=participate&quot;&#10; post_headers = headers.copy()&#10; post_headers[&quot;Content-Type&quot;] = &quot;application/x-www-form-urlencoded&quot;&#10; post_headers[&quot;Content-Length&quot;] = str(len(payload))&#10; post_headers[&quot;Origin&quot;] = &quot;https://campusonline.uni-ulm.de&quot;&#10;&#10; post_response = make_post_request(post_url, post_headers, payload)&#10;&#10; if post_response.status_code == 200:&#10; print(&quot;Successfully participated!&quot;)&#10; exit(0)&#10; else:&#10; print(&quot;Failed to participate. Status code:&quot;, post_response.status_code)&#10; exit(1)&#10; else:&#10; print(&quot;No free spots available.&quot;)&#10; break&#10; else:&#10; print(&quot;Participant information not found.&quot;)&#10;&#10; except requests.exceptions.RequestException as e:&#10; print(f&quot;Error occurred during GET request: {str(e)}&quot;)&#10;&#10; print(f'Current Time: {time.strftime(&quot;%H:%M:%S&quot;)}. Sleeping for 30 seconds...')&#10; time.sleep(30)&#10;```&#10;&#10;In this updated version of the script, I added argparse to handle command-line arguments for `cvid` and `jsessionid`. Here's a breakdown of the changes:&#10;&#10;1. Imported the `argparse` module.&#10;2. Created an `ArgumentParser` object with a description for the script.&#10;3. Added arguments for `cvid` (required, integer) and `jsessionid` (required, string) using `parser.add_argument()`.&#10;4. Parsed the command-line arguments using `args = parser.parse_args()`.&#10;5. 
Updated the `url` to include the `cvid` value from the command-line argument.&#10;6. Updated the `cookie` to include the `jsessionid` value from the command-line argument.&#10;7. Updated the `payload` in the POST request to include the `cvid` value from the command-line argument.&#10;&#10;Now, when running the script, you need to provide the `cvid` and `jsessionid` values as command-line arguments. For example:&#10;&#10;```&#10;python main.py --cvid 18456614 --jsessionid 5729EE2B769FE54D4663C1B7620E4B43&#10;```&#10;&#10;This will set the `cvid` to `18456614` and the `jsessionid` to `5729EE2B769FE54D4663C1B7620E4B43` when running the script." />
</message>
<message>
<speaker value="HUMAN" />
<text value="Now output the entire changed script into one big Python codeblock" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="Here's the entire updated script in one big Python codeblock:&#10;&#10;```python&#10;import time&#10;import requests&#10;from bs4 import BeautifulSoup&#10;import re&#10;import argparse&#10;&#10;# Create an ArgumentParser object&#10;parser = argparse.ArgumentParser(description=&quot;CoronaNG Autojoin Script&quot;)&#10;&#10;# Add arguments for cvid and jsessionid&#10;parser.add_argument(&quot;--cvid&quot;, type=int, required=True, help=&quot;The cvid value&quot;)&#10;parser.add_argument(&quot;--jsessionid&quot;, type=str, required=True, help=&quot;The JSESSIONID cookie value&quot;)&#10;&#10;# Parse the command-line arguments&#10;args = parser.parse_args()&#10;&#10;# URL to scrape&#10;url = f&quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid={args.cvid}&quot;&#10;&#10;# Cookie to include in the request&#10;cookie = f&quot;JSESSIONID={args.jsessionid}&quot;&#10;&#10;# Headers for the GET request&#10;headers = {&#10; &quot;User-Agent&quot;: &quot;Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0&quot;,&#10; &quot;Accept&quot;: &quot;text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8&quot;,&#10; &quot;Accept-Language&quot;: &quot;en-GB,en;q=0.5&quot;,&#10; &quot;Accept-Encoding&quot;: &quot;gzip, deflate, br&quot;,&#10; &quot;Referer&quot;: url,&#10; &quot;Connection&quot;: &quot;keep-alive&quot;,&#10; &quot;Cookie&quot;: cookie,&#10; &quot;Upgrade-Insecure-Requests&quot;: &quot;1&quot;,&#10; &quot;Sec-Fetch-Dest&quot;: &quot;document&quot;,&#10; &quot;Sec-Fetch-Mode&quot;: &quot;navigate&quot;,&#10; &quot;Sec-Fetch-Site&quot;: &quot;same-origin&quot;,&#10; &quot;Sec-Fetch-User&quot;: &quot;?1&quot;,&#10; &quot;Pragma&quot;: &quot;no-cache&quot;,&#10; &quot;Cache-Control&quot;: &quot;no-cache&quot;&#10;}&#10;&#10;def make_post_request(url, headers, payload, max_retries=3):&#10; retries = 0&#10; while retries &lt; max_retries:&#10; try:&#10; response = requests.post(url, 
headers=headers, data=payload)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10; return response&#10; except requests.exceptions.RequestException as e:&#10; print(f&quot;Error occurred during POST request: {str(e)}&quot;)&#10; retries += 1&#10; if retries &lt; max_retries:&#10; print(f&quot;Retrying in 5 seconds... (Attempt {retries+1}/{max_retries})&quot;)&#10; time.sleep(5)&#10; else:&#10; print(&quot;Max retries reached. Exiting.&quot;)&#10; exit(1)&#10;&#10;while True:&#10; try:&#10; print(&quot;Scraping...&quot;)&#10; # Send GET request to the URL with the specified headers&#10; response = requests.get(url, headers=headers)&#10; response.raise_for_status() # Raise an exception for 4xx or 5xx status codes&#10;&#10; # Parse the HTML content using BeautifulSoup&#10; soup = BeautifulSoup(response.content, &quot;html.parser&quot;)&#10;&#10; # Find the &lt;tr&gt; element with class &quot;dbu&quot;&#10; tr_elements = soup.find_all(&quot;tr&quot;, class_=&quot;dbu&quot;)&#10;&#10; for tr_element in tr_elements:&#10; th_element = tr_element.find(&quot;th&quot;)&#10; if th_element and &quot;Max. Teilnehmer&quot; in th_element.text:&#10; # Extract the max and current participant numbers&#10; td_element = tr_element.find(&quot;td&quot;)&#10; participant_info = td_element.text.strip()&#10;&#10; # regex to find the numbers in a string like &quot;10 (aktuell 10)&quot;&#10; regex = r&quot;(\d+) \(aktuell (\d+)\)&quot;&#10; match = re.search(regex, participant_info)&#10; if match:&#10; max_participants = int(match.group(1))&#10; current_participants = int(match.group(2&#10;```" />
</message>
<message>
<speaker value="HUMAN" />
<text value="continue from if match:" />
</message>
<message>
<speaker value="ASSISTANT" />
<text value="```python&#10; if match:&#10; max_participants = int(match.group(1))&#10; current_participants = int(match.group(2))&#10; print(&quot;Max participants:&quot;, max_participants, &quot;; Current participants:&quot;, current_participants)&#10; else:&#10; print(&quot;Failed to parse participant info:&quot;, participant_info)&#10; continue&#10;&#10; # Check if there is a free spot&#10; if current_participants &lt; max_participants:&#10; # Send POST request to participate&#10; post_url = &quot;https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html&quot;&#10; payload = f&quot;id={args.cvid}&amp;command=participate&quot;&#10; post_headers = headers.copy()&#10; post_headers[&quot;Content-Type&quot;] = &quot;application/x-www-form-urlencoded&quot;&#10; post_headers[&quot;Content-Length&quot;] = str(len(payload))&#10; post_headers[&quot;Origin&quot;] = &quot;https://campusonline.uni-ulm.de&quot;&#10;&#10; post_response = make_post_request(post_url, post_headers, payload)&#10;&#10; if post_response.status_code == 200:&#10; print(&quot;Successfully participated!&quot;)&#10; exit(0)&#10; else:&#10; print(&quot;Failed to participate. Status code:&quot;, post_response.status_code)&#10; exit(1)&#10; else:&#10; print(&quot;No free spots available.&quot;)&#10; break&#10; else:&#10; print(&quot;Participant information not found.&quot;)&#10;&#10; except requests.exceptions.RequestException as e:&#10; print(f&quot;Error occurred during GET request: {str(e)}&quot;)&#10;&#10; print(f'Current Time: {time.strftime(&quot;%H:%M:%S&quot;)}. Sleeping for 30 seconds...')&#10; time.sleep(30)&#10;```&#10;&#10;This completes the entire updated script with argparse inputs for `cvid` and `jsessionid`. 
The script can now be run from the command line with the required arguments:&#10;&#10;```&#10;python main.py --cvid 18456614 --jsessionid 5729EE2B769FE54D4663C1B7620E4B43&#10;```&#10;&#10;The script will use the provided `cvid` and `jsessionid` values to make the necessary requests and attempt to participate in the event if a free spot is available." />
</message>
</list>
</messages>
<updatedAt value="2024-04-16T13:58:21.04196231" />
</chat>
</list>
</chats>
<defaultEnhancedContext>
<enhancedContext>
<isEnabled value="false" />
</enhancedContext>
</defaultEnhancedContext>
<defaultLlm>
<llm>
<model value="anthropic/claude-3-opus-20240229" />
<provider value="Anthropic" />
<title value="Claude 3 Opus" />
</llm>
</defaultLlm>
</component>
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Poetry (coronang-autojoin)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

7
.idea/misc.xml Normal file
View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Poetry (coronang-autojoin)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Poetry (coronang-autojoin)" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/coronang-autojoin.iml" filepath="$PROJECT_DIR$/.idea/coronang-autojoin.iml" />
</modules>
</component>
</project>

10
.idea/poetry.xml Normal file
View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PoetryConfigService">
<option name="poetryVirtualenvPaths">
<set>
<option value="$USER_HOME$/.cache/pypoetry/virtualenvs/coronang-autojoin-m2p5wzdo-py3.12/bin/python" />
</set>
</option>
</component>
</project>

0
README.md Normal file
View File

View File

117
main.py Normal file
View File

@ -0,0 +1,117 @@
import time
import requests
from bs4 import BeautifulSoup
import re
import argparse
# ---- Command-line interface ----
# The caller supplies the target event (cvid) and an authenticated
# CoronaNG session cookie; both are mandatory.
cli = argparse.ArgumentParser(description="CoronaNG Autojoin Script")
cli.add_argument("--cvid", type=int, required=True, help="The cvid value")
cli.add_argument(
    "--jsessionid", type=str, required=True, help="The JSESSIONID cookie value"
)
args = cli.parse_args()

# Event-detail page that gets polled for free participant slots.
url = (
    "https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html"
    f"?cvid={args.cvid}"
)

# Session cookie string sent with every request.
session_cookie = f"JSESSIONID={args.jsessionid}"

# Browser-like headers (captured from Firefox) so the server treats the
# scraper like a regular interactive session.
headers = {
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
    "Accept-Language": "en-GB,en;q=0.5",
    "Accept-Encoding": "gzip, deflate, br",
    "Referer": url,
    "Connection": "keep-alive",
    "Cookie": session_cookie,
    "Upgrade-Insecure-Requests": "1",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-User": "?1",
    "Pragma": "no-cache",
    "Cache-Control": "no-cache"
}
def make_post_request(url, headers, payload, max_retries=3):
    """POST `payload` to `url`, retrying on request failures.

    Makes up to `max_retries` attempts, sleeping 5 seconds between them.
    Returns the successful `requests.Response`; if every attempt fails,
    terminates the whole process with exit code 1 (callers rely on this —
    they never see a failure return value).
    """
    for attempt in range(1, max_retries + 1):
        try:
            resp = requests.post(url, headers=headers, data=payload)
            # HTTP 4xx/5xx also counts as a failed attempt.
            resp.raise_for_status()
            return resp
        except requests.exceptions.RequestException as err:
            print(f"Error occurred during POST request: {str(err)}")
            if attempt < max_retries:
                print(f"Retrying in 5 seconds... (Attempt {attempt+1}/{max_retries})")
                time.sleep(5)
            else:
                print("Max retries reached. Exiting.")
                exit(1)
# Poll loop: re-fetch the event page every 30 seconds until a seat is
# grabbed (process exits 0), the signup POST ultimately fails (exit 1),
# or the user aborts the script.
while True:
    try:
        print("Scraping...")
        # Send GET request to the URL with the specified headers
        response = requests.get(url, headers=headers)
        response.raise_for_status()  # Raise an exception for 4xx or 5xx status codes
        # Parse the HTML content using BeautifulSoup
        soup = BeautifulSoup(response.content, "html.parser")
        # Find the <tr> elements with class "dbu"; one of them is the
        # "Max. Teilnehmer" row holding the seat counts.
        tr_elements = soup.find_all("tr", class_="dbu")
        for tr_element in tr_elements:
            th_element = tr_element.find("th")
            if th_element and "Max. Teilnehmer" in th_element.text:
                # Extract the max and current participant numbers
                td_element = tr_element.find("td")
                participant_info = td_element.text.strip()
                # regex to find the numbers in a string like "10 (aktuell 10)"
                regex = r"(\d+) \(aktuell (\d+)\)"
                match = re.search(regex, participant_info)
                if match:
                    max_participants = int(match.group(1))
                    current_participants = int(match.group(2))
                    print("Max participants:", max_participants, "; Current participants:", current_participants)
                else:
                    # Unexpected cell format: skip this row, keep scanning.
                    print("Failed to parse participant info:", participant_info)
                    continue
                # Check if there is a free spot
                if current_participants < max_participants:
                    # Send POST request to participate
                    post_url = "https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html"
                    # NOTE(review): the form field is named "id" and is filled
                    # with the cvid here; the pre-parametrized version posted a
                    # different id than the page's cvid — confirm the
                    # participate form really expects the cvid value.
                    payload = f"id={args.cvid}&command=participate"
                    post_headers = headers.copy()
                    post_headers["Content-Type"] = "application/x-www-form-urlencoded"
                    # requests would compute Content-Length itself; set it
                    # explicitly to mirror the captured browser request.
                    post_headers["Content-Length"] = str(len(payload))
                    post_headers["Origin"] = "https://campusonline.uni-ulm.de"
                    post_response = make_post_request(post_url, post_headers, payload)
                    if post_response.status_code == 200:
                        print("Successfully participated!")
                        exit(0)  # success: stop polling entirely
                    else:
                        print("Failed to participate. Status code:", post_response.status_code)
                        exit(1)
                else:
                    # Course full: stop scanning rows, fall through to sleep.
                    print("No free spots available.")
                    break
            else:
                print("Participant information not found.")
    except requests.exceptions.RequestException as e:
        # GET failures are not fatal; just try again on the next cycle.
        print(f"Error occurred during GET request: {str(e)}")
    print(f'Current Time: {time.strftime("%H:%M:%S")}. Sleeping for 30 seconds...')
    time.sleep(30)

106
main.py.old Normal file
View File

@ -0,0 +1,106 @@
import time
import requests
from bs4 import BeautifulSoup
import re
# URL to scrape (hard-coded event page; superseded by the argparse
# version in main.py).
url = "https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614"
# Cookie to include in the request
# SECURITY NOTE(review): a live JSESSIONID secret is committed here in
# plain text. Anyone with repository access could hijack this CoronaNG
# session — invalidate/rotate the session and pass the cookie in via a
# CLI argument or environment variable instead (as main.py now does).
cookie = "JSESSIONID=5729EE2B769FE54D4663C1B7620E4B43"
# Headers for the GET request — mirrors a captured Firefox request so the
# server treats the scraper like a regular browser session.
headers = {
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
    "Accept-Language": "en-GB,en;q=0.5",
    "Accept-Encoding": "gzip, deflate, br",
    "Referer": "https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html?cvid=18456614",
    "Connection": "keep-alive",
    "Cookie": cookie,
    "Upgrade-Insecure-Requests": "1",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-User": "?1",
    "Pragma": "no-cache",
    "Cache-Control": "no-cache"
}
def make_post_request(url, headers, payload, max_retries=3):
    """POST `payload` to `url`, retrying on request failures.

    Makes up to `max_retries` attempts, sleeping 5 seconds between them.
    Returns the successful `requests.Response`; if every attempt fails,
    terminates the whole process with exit code 1 (callers rely on this —
    they never see a failure return value).
    """
    for attempt in range(1, max_retries + 1):
        try:
            resp = requests.post(url, headers=headers, data=payload)
            # HTTP 4xx/5xx also counts as a failed attempt.
            resp.raise_for_status()
            return resp
        except requests.exceptions.RequestException as err:
            print(f"Error occurred during POST request: {str(err)}")
            if attempt < max_retries:
                print(f"Retrying in 5 seconds... (Attempt {attempt+1}/{max_retries})")
                time.sleep(5)
            else:
                print("Max retries reached. Exiting.")
                exit(1)
# Poll loop: re-fetch the event page every 30 seconds until a seat is
# grabbed (process exits 0), the signup POST ultimately fails (exit 1),
# or the user aborts the script.
while True:
    try:
        print("Scraping...")
        # Send GET request to the URL with the specified headers
        response = requests.get(url, headers=headers)
        response.raise_for_status()  # Raise an exception for 4xx or 5xx status codes
        # Parse the HTML content using BeautifulSoup
        soup = BeautifulSoup(response.content, "html.parser")
        # Find the <tr> elements with class "dbu"; one of them is the
        # "Max. Teilnehmer" row holding the seat counts.
        tr_elements = soup.find_all("tr", class_="dbu")
        for tr_element in tr_elements:
            th_element = tr_element.find("th")
            if th_element and "Max. Teilnehmer" in th_element.text:
                # Extract the max and current participant numbers
                td_element = tr_element.find("td")
                participant_info = td_element.text.strip()
                # regex to find the numbers in a string like "10 (aktuell 10)"
                regex = r"(\d+) \(aktuell (\d+)\)"
                match = re.search(regex, participant_info)
                if match:
                    max_participants = int(match.group(1))
                    current_participants = int(match.group(2))
                    print("Max participants:", max_participants, "; Current participants:", current_participants)
                else:
                    # Unexpected cell format: skip this row, keep scanning.
                    print("Failed to parse participant info:", participant_info)
                    continue
                # Check if there is a free spot
                if current_participants < max_participants:
                    # Send POST request to participate
                    post_url = "https://campusonline.uni-ulm.de/CoronaNG/user/userDetails.html"
                    # NOTE(review): the posted id (18456649) differs from the
                    # page's cvid (18456614) — presumably the participate form
                    # uses its own id, but this is worth confirming.
                    payload = "id=18456649&command=participate"
                    post_headers = headers.copy()
                    post_headers["Content-Type"] = "application/x-www-form-urlencoded"
                    # requests would compute Content-Length itself; set it
                    # explicitly to mirror the captured browser request.
                    post_headers["Content-Length"] = str(len(payload))
                    post_headers["Origin"] = "https://campusonline.uni-ulm.de"
                    post_response = make_post_request(post_url, post_headers, payload)
                    if post_response.status_code == 200:
                        print("Successfully participated!")
                        exit(0)  # success: stop polling entirely
                    else:
                        print("Failed to participate. Status code:", post_response.status_code)
                        exit(1)
                else:
                    # Course full: stop scanning rows, fall through to sleep.
                    print("No free spots available.")
                    break
            else:
                print("Participant information not found.")
    except requests.exceptions.RequestException as e:
        # GET failures are not fatal; just try again on the next cycle.
        print(f"Error occurred during GET request: {str(e)}")
    print(f'Current Time: {time.strftime("%H:%M:%S")}. Sleeping for 30 seconds...')
    time.sleep(30)

211
poetry.lock generated Normal file
View File

@ -0,0 +1,211 @@
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]]
name = "beautifulsoup4"
version = "4.12.3"
description = "Screen-scraping library"
optional = false
python-versions = ">=3.6.0"
files = [
{file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
{file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
]
[package.dependencies]
soupsieve = ">1.2"
[package.extras]
cchardet = ["cchardet"]
chardet = ["chardet"]
charset-normalizer = ["charset-normalizer"]
html5lib = ["html5lib"]
lxml = ["lxml"]
[[package]]
name = "bs4"
version = "0.0.2"
description = "Dummy package for Beautiful Soup (beautifulsoup4)"
optional = false
python-versions = "*"
files = [
{file = "bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc"},
{file = "bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925"},
]
[package.dependencies]
beautifulsoup4 = "*"
[[package]]
name = "certifi"
version = "2024.2.2"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
]
[[package]]
name = "charset-normalizer"
version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
[[package]]
name = "idna"
version = "3.7"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
]
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "soupsieve"
version = "2.5"
description = "A modern CSS selector implementation for Beautiful Soup."
optional = false
python-versions = ">=3.8"
files = [
{file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
{file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
]
[[package]]
name = "urllib3"
version = "2.2.1"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
{file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
{file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "743a48ace825e632fbd59ba244398416c85baabad287a9f3626e96d165079c65"

16
pyproject.toml Normal file
View File

@ -0,0 +1,16 @@
# Poetry manifest for the coronang-autojoin project (blank lines were
# collapsed by the commit-page rendering; key/value content is unchanged).
[tool.poetry]
name = "coronang-autojoin"
version = "0.1.0"
description = ""
authors = ["Yandrik <me@yandrik.dev>"]
readme = "README.md"
# Runtime dependencies: bs4 (per the lock file, a dummy wrapper that pulls in
# beautifulsoup4) and requests for HTTP; requires Python 3.12+.
[tool.poetry.dependencies]
python = "^3.12"
bs4 = "^0.0.2"
requests = "^2.31.0"
# Standard Poetry build-backend declaration.
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

0
tests/__init__.py Normal file
View File