Initial commit
18
.gitignore
vendored
Normal file
@@ -0,0 +1,18 @@
*.pyc
media/
todo.txt
/static/
**/migrations/*.py
!**/migrations/__init__.py
db.sqlite3
todos/
celerybeat-schedule
countries.mmdb
tmp/
*.session
*.session-journal
*.lock
.coverage
*.sublime-workspace
locale/**/*.mo
/data/
48
clean.sh
Normal file
@@ -0,0 +1,48 @@
#!/bin/sh

# Check the drive space used by cached files
du -sh /var/cache/apt/archives

# Clean all the log files
#for logs in `find /var/log -type f`; do > $logs; done

logs=`find /var/log -type f`
for i in $logs
do
    > $i
done

# Getting rid of partial packages
apt-get clean && apt-get autoclean
apt-get remove --purge -y software-properties-common

# Getting rid of no longer required packages
apt-get autoremove -y

# Getting rid of orphaned packages
deborphan | xargs sudo apt-get -y remove --purge

# Free up space by cleaning out the cached packages
apt-get clean

# Remove the Trash
rm -rf /home/*/.local/share/Trash/*/**
rm -rf /root/.local/share/Trash/*/**

# Remove man pages
rm -rf /usr/share/man/??
rm -rf /usr/share/man/??_*

# Delete all .gz and rotated log files
find /var/log -type f -regex ".*\.gz$" | xargs rm -Rf
find /var/log -type f -regex ".*\.[0-9]$" | xargs rm -Rf

# Cleaning the old kernels
dpkg-query -l|grep linux-im*
#dpkg-query -l |grep linux-im*|awk '{print $2}'
apt-get purge $(dpkg -l 'linux-*' | sed '/^ii/!d;/'"$(uname -r | sed "s/\(.*\)-\([^0-9]\+\)/\1/")"'/d;s/^[^ ]* [^ ]* \([^ ]*\).*/\1/;/[0-9]/!d' | head -n -1) --assume-yes
apt-get install linux-headers-`uname -r|cut -d'-' -f3`-`uname -r|cut -d'-' -f4`

# Cleaning is completed
echo "Cleaning is completed"
58
git/find.py
Normal file
@@ -0,0 +1,58 @@
import hashlib
import zlib
import sys
import os


def calculate_git_hash(file_path):
    # Read the file content
    with open(file_path, 'rb') as f:
        content = f.read()

    # Decompress the zlib-compressed content
    decompressed_content = zlib.decompress(content)

    # Calculate the SHA-1 hash of the decompressed content
    sha1 = hashlib.sha1(decompressed_content).hexdigest()

    return sha1


def main(hash_to_find):
    files_processed = 0
    skipped = 0
    for root, dirs, files in os.walk('./writable'):
        for file in files:
            # if 'objects' not in root:
            #     skipped += 1
            #     continue
            if '.' in file:
                continue

            file_path = os.path.join(root, file)

            if os.path.islink(file_path):
                skipped += 1
                continue

            print(file_path)
            try:
                file_hash = calculate_git_hash(file_path)
                if file_hash == hash_to_find:
                    print(f"File with hash {hash_to_find} found at {file_path}")
                    sys.exit(0)
            except Exception as e:
                # print(f"Error processing {file_path}: {e}")
                print(f"Error processing {file_path}")
            files_processed += 1
            print(files_processed)

    print(f'Not found. Processed: {files_processed}. Skipped: {skipped}')


if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("Usage: python3 find.py <hash_to_find>")
        sys.exit(1)

    hash_to_find = sys.argv[1]
    main(hash_to_find)
68
git/find_bulk.py
Normal file
@@ -0,0 +1,68 @@
import hashlib
import zlib
import sys
import os
import shutil


def calculate_git_hash(file_path):
    # Read the file content
    with open(file_path, 'rb') as f:
        content = f.read()

    # Decompress the zlib-compressed content
    decompressed_content = zlib.decompress(content)

    # Calculate the SHA-1 hash of the decompressed content
    sha1 = hashlib.sha1(decompressed_content).hexdigest()

    return sha1


def main(hashes):
    files_processed = 0
    skipped = 0
    remaining_hashes = set(hashes)
    for root, dirs, files in os.walk('./writable'):
        for file in files:
            # if 'objects' not in root:
            #     skipped += 1
            #     continue
            if '.' in file:
                continue

            file_path = os.path.join(root, file)

            if os.path.islink(file_path):
                skipped += 1
                continue

            print(file_path)
            try:
                file_hash = calculate_git_hash(file_path)
                if file_hash in remaining_hashes:
                    print(f"File with hash {file_hash} found at {file_path}")
                    remaining_hashes.remove(file_hash)

                    destination_file = f'/var/www/test/useless/contests.git/objects/{file_hash}'
                    shutil.copyfile(file_path, destination_file)
                    if len(remaining_hashes) == 0:
                        print('All hashes found.')
                        sys.exit(0)
            except Exception as e:
                # print(f"Error processing {file_path}: {e}")
                print(f"Error processing {file_path}")
            files_processed += 1
            print(files_processed)

    print(f'Not found. Processed: {files_processed}. Skipped: {skipped}')


if __name__ == "__main__":
    hashes = [
        "1b38afd1a39bc0c2de0d01e513499d38a08e12b7",
        "84ee1354e2182ccb5445d103f3dcc54dbf682ec8",
        "e44d1404d642858babb9d45da0eba9ec410078f0",
        "9c16b13a81f70cba6e76195664c7da28f3b93dc6",
    ]
    main(hashes)
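Note: git only reads loose objects back from a two-level layout, objects/<first two hex characters>/<remaining 38 characters>, whereas find_bulk.py copies recovered files to a flat objects/<hash> path. A minimal sketch of a destination that git can pick up directly (same base directory the script assumes):

import os
import shutil


def copy_as_loose_object(file_path, file_hash,
                         objects_dir='/var/www/test/useless/contests.git/objects'):
    # Loose objects live at objects/<first 2 hex chars>/<remaining 38 chars>.
    dest_dir = os.path.join(objects_dir, file_hash[:2])
    os.makedirs(dest_dir, exist_ok=True)
    shutil.copyfile(file_path, os.path.join(dest_dir, file_hash[2:]))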
32
git/find_commits.sh
Normal file
@@ -0,0 +1,32 @@
#!/bin/bash

# Location of the Git objects directory.
# Change this if your setup is different.
OBJECTS_DIR="./objects"

# Loop over the 256 possible first two characters of the SHA-1 hash.
for dir in $(ls -1 $OBJECTS_DIR); do
    # Ignore the 'info' and 'pack' special directories.
    if [ "$dir" == "info" ] || [ "$dir" == "pack" ]; then
        continue
    fi

    # Loop over the object files in each directory.
    for file in $(ls -1 $OBJECTS_DIR/$dir); do
        # Concatenate the directory and file names to get the full SHA-1 hash.
        full_hash="${dir}${file}"

        # Use git cat-file to find the type of the object.
        obj_type=$(git cat-file -t $full_hash 2> /dev/null)

        # If git cat-file fails, skip this object.
        if [ $? -ne 0 ]; then
            continue
        fi

        # If the object is a commit, print its hash.
        if [ "$obj_type" == "commit" ]; then
            echo "Found commit: $full_hash"
        fi
    done
done
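The loop above only sees loose objects; anything already packed will not appear under the two-character directories. As a hedged sketch, both cases can be covered by driving git cat-file --batch-check --batch-all-objects (git 2.6+) from Python, run inside the repository:

import subprocess

# Each output line is "<oid> <type> <size>"; keep only the commits.
out = subprocess.run(
    ["git", "cat-file", "--batch-check", "--batch-all-objects"],
    capture_output=True, text=True, check=True,
).stdout
for line in out.splitlines():
    oid, obj_type, _size = line.split()
    if obj_type == "commit":
        print(f"Found commit: {oid}")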
25
git/get_hash.py
Normal file
@@ -0,0 +1,25 @@
import hashlib
import zlib
import sys


def calculate_git_hash(file_path):
    # Read the file content
    with open(file_path, 'rb') as f:
        content = f.read()

    # Decompress the zlib-compressed content
    decompressed_content = zlib.decompress(content)

    # Calculate the SHA-1 hash of the decompressed content
    sha1 = hashlib.sha1(decompressed_content).hexdigest()

    return sha1


if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("Usage: python get_hash.py <path_to_git_object_file>")
    else:
        file_path = sys.argv[1]
        hash_value = calculate_git_hash(file_path)
        print(f"{hash_value}")
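These scripts work because a loose object file is the zlib-compressed form of "<type> <size>\0<content>", so the SHA-1 of the decompressed bytes is the object id itself. For an ordinary file that is not yet a git object, the header has to be added first; a minimal sketch, equivalent to what git hash-object prints for a blob:

import hashlib
import sys


def git_blob_hash(path):
    # git hashes "blob <size>\0" followed by the raw file contents.
    with open(path, 'rb') as f:
        data = f.read()
    header = f"blob {len(data)}\0".encode()
    return hashlib.sha1(header + data).hexdigest()


if __name__ == "__main__":
    print(git_blob_hash(sys.argv[1]))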
37
programs/download youtube.py
Normal file
@@ -0,0 +1,37 @@
import pytube


def get_video(url):
    print('----------------')
    print(f'Getting video {url}')
    youtube = pytube.YouTube(url)
    return youtube


def download_video(youtube, require_audio=False, resolution=None, ext='webm'):
    print(f'Found video {youtube.title}. Searching for streams...')
    streams = youtube.streams.filter(file_extension=ext)

    # Filter resolution
    if resolution:
        streams = streams.filter(res=resolution)

    # Filter audio
    if require_audio:
        streams = [x for x in streams if x.includes_audio_track]

    if streams:
        stream = max(streams, key=lambda x: x.filesize)
        print(f'Found stream {stream}. Attempting download...')
        stream.download()
        print('Download complete')
    else:
        print('Failed to find stream')


urls = [
    'https://www.youtube.com/watch?v=aChm3M8hZvo',
    'https://www.youtube.com/watch?v=x74OJ8A855Q',
    'https://www.youtube.com/watch?v=oNj8-yO0bWs'
]
for url in urls:
    yt = get_video(url)
    download_video(yt)
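Note that webm streams at higher resolutions are usually DASH video-only tracks, which is why require_audio=True narrows the search so much. A hedged alternative, assuming a current pytube release, is to ask for the best progressive stream (video and audio muxed into one file):

from pytube import YouTube


def download_with_audio(url):
    # get_highest_resolution() returns the best progressive stream,
    # i.e. one that already bundles video and audio.
    yt = YouTube(url)
    stream = yt.streams.get_highest_resolution()
    if stream is None:
        print('No progressive stream available')
    else:
        stream.download()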
38
scraper_utilities/angolajsontest.py
Normal file
@@ -0,0 +1,38 @@
"""
Used for parsing a saved JSON response from Algolia-based websites
"""

import json

with open('testing.json', 'r') as f:
    st = f.read()

decoded = json.loads(st)

products = decoded['hits']

for product in products:
    bad_conditions = [
        product['product_type'].lower() in ["gift card", "music"],
        not product['product_published'],
        'product' not in product
    ]
    if any(bad_conditions):
        continue

    if 'product' not in product:
        print(json.dumps(product))
        break

    item_info = {
        "ShopSourceID": 1,
        "Url": 'https://www.jbhifi.com.au/products/' + product['handle'],
        "BaseUrl": "jbhifi.com.au",
        "SKU": product['sku'],
        "MPN": product['product']['supplierProductDetails'][0]['supplierStockCode'],
        "Model": product['product']['model'],
        "Title": product['title'],
        "PriceIncTax": product['price'],
        "IsInStock": product['availability']['canBuyOnline'],
        "QuantityAvailable": None
    }
46
scraper_utilities/angolatesting.py
Normal file
@@ -0,0 +1,46 @@
"""
Used for attempting to get data from Algolia-based websites.
"""

import requests
import re
import json
from scrapy import Selector


xhr_headers = {
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'content-type': 'application/json',
    'Accept-Encoding': 'gzip, deflate, br',
    'Connection': 'keep-alive',
    'X-Requested-With': 'XMLHttpRequest',
    'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/88.0',
    'x-algolia-api-key': 'YmE4MTVkMzc1YmU4ZDcxM2QxNTMzNDZlMzVlZjBjMTk4MmM5ZWU0NjBlN2I0NjE2NTk1M2VjZDg3MzQ1YjVmMXRhZ0ZpbHRlcnM9',
    'x-algolia-application-id': '3QIRNP5HAI'
}


url = 'https://3qirnp5hai-2.algolianet.com/1/indexes/*/queries?x-algolia-agent=Algolia for vanilla JavaScript (lite) 3.27.0;instantsearch.js 2.10.2;Magento2 integration (1.11.3);JS Helper 2.26.0'

# reqs = {
#     "query": "",
#     "hitsPerPage": 100,
#     "page": 0,
#     "filters": "product_published = 1 AND availability.displayProduct = 1"
# }

reqs = {
    "requests": [
        {
            "indexName": "production_default_products_price_default_desc",
            "params": "query=&hitsPerPage=1000&filters=price.AUD.default < 500"
        }
    ]
}

r = requests.post(url, json.dumps(reqs), headers=xhr_headers)

with open("testing.json", "w") as f:
    f.write(r.text)
48
scraper_utilities/cookies.py
Normal file
@@ -0,0 +1,48 @@
"""
Used for capturing cookies after a request to a specific website
"""

from time import sleep
import json
from selenium import webdriver


def check_captcha(driver, check_string):
    print('Checking for captcha')
    while check_string not in driver.page_source:
        print('Found captcha. Waiting.')
        sleep(5)
    print('Captcha not found, proceeding')


def main():
    SHOP_LINK = 'https://www.mwave.com.au/'
    driver = webdriver.Firefox()
    driver.get(SHOP_LINK)

    # check_captcha(driver, 'Your current connection has triggered our security challenge, please complete the chall')

    cookies = driver.get_cookies()

    user_agent = driver.execute_script("return navigator.userAgent;")
    print(user_agent)

    # driver.find_element_by_tag_name('body').send_keys(Keys.COMMAND + 't')
    # driver.get(l)
    driver.close()

    return cookies


def decode_cookies(cookies):
    cookies_dict = {}
    for i in cookies:
        cookies_dict[i['name']] = i['value']
    otp = json.dumps(cookies_dict)
    with open('cookies.json', 'w') as f:
        f.write(otp)


c = main()
decode_cookies(c)
21
scraper_utilities/htmltesting.py
Normal file
@@ -0,0 +1,21 @@
"""
Used for writing parsing scripts against a saved HTML file
Allows parsing a page without requesting it each time
"""

from scrapy import Selector
import json


with open('testing.html', 'r') as f:
    st = f.read()

response = Selector(text=st)

# ==================
article_ids = response.css('article[id]::attr(id)').getall()

links = {'https://voice.mv/%s/' % x.split('-')[1] for x in article_ids}

print(links)
13
scraper_utilities/jsontesting.py
Normal file
@@ -0,0 +1,13 @@
"""
Used for writing parsing scripts against a saved JSON file
Allows parsing a response without requesting the API each time
"""

import json

with open('testing.json', 'r') as f:
    st = f.read()

l = json.loads(st)

print(l)
118
scraper_utilities/program.py
Normal file
@@ -0,0 +1,118 @@
import requests
from scrapy import Selector
import json


headers = {
    "content-type": "application/json",
    "accept": "application/json",
    "referer": "https://www.causeiq.com/directory/business-and-community-development-organizations-list/",
    "origin": "https://www.causeiq.com",
    "sec-fetch-site": "same-origin",
    "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36",
}

payload = {
    "filters": [],
    "pageNumber": 1,
    "sortDir": "desc",
    "sortHow": "popularity"
}

url = 'https://www.causeiq.com/directory/business-and-community-development-organizations-list/'

r_init = requests.get(url, headers=headers)
cookies = r_init.cookies
print(r_init.headers['set-cookie'])
headers['x-csrftoken'] = cookies['csrftoken']


stations = requests.get('https://www.causeiq.com/directory/retrieve_variable/metros/')
stations = json.loads(stations.text).keys()


def get_stations():
    print('Total stations: ', len(stations))

    total_companies = 0
    for i in stations:
        filters = [{
            "hash": "1",
            "type": "metro",
            "value": str(i)
        }]
        payload['filters'] = filters
        r = requests.post(
            url,
            json.dumps(payload),
            headers=headers,
            cookies=cookies
        )

        decoded = json.loads(r.text)
        total_companies += decoded['count']
        s = 'Got metro %s with %s companies. Current total: %s' % (i, decoded['count'], total_companies)


def get_full_station(station):
    filters = [{
        "hash": "1",
        "type": "metro",
        "value": station
    }]
    payload['filters'] = filters
    r = requests.post(
        url,
        json.dumps(payload),
        headers=headers,
        cookies=cookies
    )
    decoded = json.loads(r.text)
    print(decoded['count'])

    letters = 'abcdefghijklmnopqrstuvwxyz'

    total = 0
    for l in letters:
        filters = [
            {
                "hash": "1",
                "type": "metro",
                "value": station
            },
            {
                "hash": "2",
                "type": "keywords",
                "value": l
            }
        ]
        payload['filters'] = filters
        r = requests.post(
            url,
            json.dumps(payload),
            headers=headers,
            cookies=cookies
        )
        decoded = json.loads(r.text)
        total += decoded['count']
        print('Got %s companies from letter %s. Total: %s' % (decoded['count'], l, total))


# print("one")

# print('two')
# payload['pageNumber'] = 2
# r = requests.post(
#     'https://www.causeiq.com/directory/business-and-community-development-organizations-list/',
#     json.dumps(payload),
#     headers=headers,
#     cookies=cookies
# )
print("end")

# filename = 'responses.json'
# with open(filename, 'w') as f:
#     f.write(r.text)
27
scraper_utilities/request.py
Normal file
@@ -0,0 +1,27 @@
'''
Used for requesting data from a website.
Generated files are targets for the htmltesting/jsontesting scripts
'''
import requests
import json


headers = {
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
    'token': 'b5ozEsESGV/jmwjy0v2MjRTYLnCozdQG2Kt1GOqfJ/1pKFgOi/1u5lyID2ZGcZSN0jaLB4pFpbJ5kJaFaiXyZ09eF4H6GXK3/x5euFTCOvWc8rqx22knnnJMgJY4q/mBbnf2+oJ4G5p3FU+Am2kPVP70OJ+oS/Gv18GtDXbVxrKozOiwaNrF6O7oEVmldDENCl34N0d3Pl7f53cYGKBWArkieRgLjbrjkEU2hMS++vuT+1JIAmW45OKpw1oT2ueQORgmZ5yaSw6xxOFpwoMvjIXSas81yUKvykHDvRFFeTaIAW6lyyLpQ/TC2rzntea4ASwCmn8XiHs3lkwP6OEvaQ==',
    'origin': 'https://cryptoslam.io',
    'accept': 'application/json, text/javascript, */*; q=0.01'
    # 'Cookie': 'XSRF-TOKEN=eyJpdiI6IkhBamFqSmVsdUNoT1F4WkF0NXdxSEE9PSIsInZhbHVlIjoiKzdQN1diREYwanJiSGFBVTZDV0tlSzdPY1BYelBOWDNad1RXTjk1anJqUnpmK2lqdXJUd3Q2TkJPa1p2TmtKQiIsIm1hYyI6IjkzMmE4YmY0YTk0OGU4YWFhMTMxMDZjMTY1MzU0ZTA4NzAxOTI5MmVmNGJmMWZiNmE3YmQ5ZGE5NDVhMDA3YmIifQ==; expires=Mon, 13-Jun-2022 09:55:00 GMT; Max-Age=15600000; path=/, one_online_session=eyJpdiI6IjRMaW83bnNlajBidmNabFg4d3ErYkE9PSIsInZhbHVlIjoiXC9wdFNwdWdIOTZLaU9HMkU2dzFSU0huYTdPa3c0Q3BmbGp5RFRvcHEyS01nelk0R1JNczRcL0lUdzhRa211MTByIiwibWFjIjoiNzI3N2NkOGE2YzY3ODBkZDg1ZjA1NGJhZTAxNzcwNDQ4NDc5NDRlMzYwNmJkYzY0N2JlYTBhMjU2YTM1M2YzYiJ9; expires=Mon, 13-Jun-2022 09:55:00 GMT; Max-Age=15600000; path=/; httponly',
}

url = 'https://adhadhu.com/'
data = '''{"draw":1,"columns":[{"data":null,"name":"","searchable":true,"orderable":false,"search":{"value":"","regex":false}},{"data":null,"name":"TimeStamp","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"Tokens.Attributes.Background","searchable":true,"orderable":false,"search":{"value":"","regex":false}},{"data":null,"name":"PriceDoubleType","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"PriceUSDDoubleType","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"Tokens.Attributes","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"Tokens.Attributes.SerialNumber","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"","searchable":true,"orderable":true,"search":{"value":"","regex":false}},{"data":null,"name":"","searchable":true,"orderable":false,"search":{"value":"","regex":false}}],"order":[{"column":1,"dir":"desc"}],"start":0,"length":50,"search":{"value":"","regex":false},"startDateHeader":"","endDateHeader":"","buyer":"","seller":"","attributesQuery":{},"marketplace":""}'''
r = requests.post(url, data, headers=headers)
# r = requests.get(url)
print(r)
print(r.text)


filename = 'testing.html'
with open(filename, 'w') as f:
    f.write(r.text)
7
servers/.bash_aliases
Normal file
@@ -0,0 +1,7 @@
alias skenv='source /var/www/venvs/$PROJECT_NAME/bin/activate'
alias djrun='python manage.py runserver'
alias djm='python manage.py'
alias sctl='sudo supervisorctl'
alias srv='sudo service'
alias skdir='cd /var/www/$PROJECT_NAME'
30
servers/deployment/*-nginx.conf
Normal file
@@ -0,0 +1,30 @@
upstream ******* {
    server unix:///var/www/*******/deployment/uwsgi_nginx.sock;
}


server {
    charset utf-8;
    client_max_body_size 75M;
    listen 80;
    server_name ==========;

    if ($host !~ ^(app\.example\.com|admin\.example\.com)$) {
        return 404;
    }

    location / {
        uwsgi_pass *******;
        include /var/www/*******/deployment/uwsgi_params;
    }

    location /static {
        # autoindex on;
        alias /var/www/*******/static/;
    }

    location /media/ {
        # autoindex on;
        alias /var/www/*******/media/;
    }
}
16
servers/deployment/*-sv.conf
Normal file
@@ -0,0 +1,16 @@
[program:*******]
command=uwsgi --emperor "/var/www/*******/deployment/*******-uwsgi.ini"
stdout_logfile=/var/www/logs/*******/supervisor.log
stderr_logfile=/var/www/logs/*******/supervisor_err.log
autostart=true
autorestart=true


[program:*******_celery]
command=/var/www/venvs/*******/bin/celery -A ******* worker --beat
directory=/var/www/*******
autostart=true
autorestart=true
redirect_stderr=true
stdout_logfile=/var/www/logs/*******/supervisor_celery.log
stderr_logfile=/var/www/logs/*******/supervisor_celery_err.log
11
servers/deployment/*-uwsgi.ini
Normal file
@@ -0,0 +1,11 @@
[uwsgi]
chdir = /var/www/*******
module = *******.wsgi
home = /var/www/venvs/*******
plugins = python3
virtualenv = /var/www/venvs/*******
master = true
processes = 10
socket = /var/www/*******/deployment/uwsgi_nginx.sock
chmod-socket = 666
vacuum = true
24
servers/deployment/scripts/env_setup.sh
Executable file
@@ -0,0 +1,24 @@
#!/bin/bash

# Check that PROJECT_NAME is set
if [ -z "$PROJECT_NAME" ]; then
    echo "Error: PROJECT_NAME is not set."
    exit 1
fi

project_name="$PROJECT_NAME"

# Create /var/www directory
mkdir -p /var/www

# Change group of /var/www directory to oleg20111511
chgrp oleg20111511 /var/www

# Change permissions of /var/www directory to 775
chmod 775 /var/www

# Create /var/www/logs/$project_name directory
# mkdir -p "/var/www/logs/$project_name"

echo "Project setup complete."
23
servers/deployment/scripts/install_packages.sh
Executable file
@@ -0,0 +1,23 @@
#!/bin/bash

# Update package lists
sudo apt update

# Install Supervisor
sudo apt install supervisor -y

# Install Nginx
sudo apt install nginx -y

# Install redis
sudo apt install redis -y

# Install certbot
sudo apt install python3-certbot-nginx -y

# Install uWSGI and its dependencies
sudo apt install uwsgi build-essential python3-dev -y

# Install the Python 3 plugin for uWSGI
sudo apt install uwsgi-plugin-python3 -y
29
servers/deployment/scripts/install_postgresql.sh
Executable file
@@ -0,0 +1,29 @@
#!/bin/bash

# Check if username and password arguments are provided
if [ $# -ne 2 ]; then
    echo "Usage: $0 <username> <password>"
    exit 1
fi

username=$1
password=$2

# Install PostgreSQL
sudo apt-get update
sudo apt-get install -y postgresql

# Create a new database named after the user
sudo -u postgres psql -c "CREATE DATABASE $username;"

# Create a user with the provided username and password
sudo -u postgres psql -c "CREATE USER $username WITH PASSWORD '$password';"

# Grant all privileges on that database to the user
sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $username TO $username;"

# Grant permission to create and delete databases
sudo -u postgres psql -c "ALTER USER $username CREATEDB;"

echo "Database setup complete."
36
servers/deployment/scripts/link.sh
Executable file
@@ -0,0 +1,36 @@
#!/bin/bash

# Check that PROJECT_NAME is set
if [ -z "$PROJECT_NAME" ]; then
    echo "Error: PROJECT_NAME is not set."
    exit 1
fi


project_name=$PROJECT_NAME

ln -s /var/www/$project_name/deployment/$project_name-nginx.conf /etc/nginx/sites-enabled/$project_name-nginx.conf

ln -s /var/www/$project_name/deployment/$project_name-sv.conf /etc/supervisor/conf.d/$project_name-sv.conf


logs_dir="/var/www/logs"
project_logs_dir="$logs_dir/$project_name"

# Create /var/www/logs directory if it doesn't exist
if [ ! -d "$logs_dir" ]; then
    sudo mkdir -p "$logs_dir"
    sudo chown -R "$USER:$USER" "$logs_dir"
    sudo chmod -R 755 "$logs_dir"
    echo "Created directory $logs_dir"
fi

# Create /var/www/logs/$project_name directory if it doesn't exist
if [ ! -d "$project_logs_dir" ]; then
    sudo mkdir -p "$project_logs_dir"
    sudo chown -R "$USER:$USER" "$project_logs_dir"
    sudo chmod -R 755 "$project_logs_dir"
    echo "Created directory $project_logs_dir"
fi
43
servers/deployment/scripts/update.sh
Executable file
@@ -0,0 +1,43 @@
#!/bin/bash

# echo "export PROJECT_NAME=value" >> ~/.bashrc

# Check if project_name argument is provided
# if [ $# -ne 1 ]; then
#     echo "Usage: $0 <project_name>"
#     exit 1
# fi

# project_name=$1


if [ -z "$PROJECT_NAME" ]; then
    echo "Error: PROJECT_NAME is not set."
    exit 1
fi


# Change to the project directory
cd /var/www/$PROJECT_NAME

# Perform a git pull
git pull
git submodule init
git submodule update

# Activate virtual environment
source /var/www/venvs/$PROJECT_NAME/bin/activate

# Collect static files
python manage.py collectstatic --noinput

# Create database migrations
python manage.py makemigrations

# Apply database migrations
python manage.py migrate

# Restart supervisor processes
sudo supervisorctl restart $PROJECT_NAME
sudo supervisorctl restart "$PROJECT_NAME"_celery
14
servers/deployment/uwsgi_params
Normal file
@@ -0,0 +1,14 @@
uwsgi_param QUERY_STRING $query_string;
uwsgi_param REQUEST_METHOD $request_method;
uwsgi_param CONTENT_TYPE $content_type;
uwsgi_param CONTENT_LENGTH $content_length;
uwsgi_param REQUEST_URI $request_uri;
uwsgi_param PATH_INFO $document_uri;
uwsgi_param DOCUMENT_ROOT $document_root;
uwsgi_param SERVER_PROTOCOL $server_protocol;
uwsgi_param REQUEST_SCHEME $scheme;
uwsgi_param HTTPS $https if_not_empty;
uwsgi_param REMOTE_ADDR $remote_addr;
uwsgi_param REMOTE_PORT $remote_port;
uwsgi_param SERVER_PORT $server_port;
uwsgi_param SERVER_NAME $server_name;
40
useragents.txt
Normal file
@@ -0,0 +1,40 @@
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/5321 (KHTML, like Gecko) Chrome/40.0.833.0 Mobile Safari/5321
Mozilla/5.0 (Windows NT 6.2) AppleWebKit/5340 (KHTML, like Gecko) Chrome/39.0.830.0 Mobile Safari/5340
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7_0 rv:5.0) Gecko/20130307 Firefox/35.0
Opera/8.93 (Windows NT 5.0; sl-SI) Presto/2.11.232 Version/11.00
Mozilla/5.0 (X11; Linux x86_64; rv:6.0) Gecko/20100808 Firefox/35.0
Mozilla/5.0 (X11; Linux i686; rv:5.0) Gecko/20200511 Firefox/35.0
Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/533.31.5 (KHTML, like Gecko) Version/5.0.3 Safari/533.31.5
Opera/8.22 (X11; Linux x86_64; sl-SI) Presto/2.12.239 Version/11.00
Mozilla/5.0 (iPad; CPU OS 8_2_1 like Mac OS X; sl-SI) AppleWebKit/533.20.3 (KHTML, like Gecko) Version/3.0.5 Mobile/8B115 Safari/6533.20.3
Opera/9.86 (X11; Linux i686; en-US) Presto/2.10.272 Version/11.00
Mozilla/5.0 (Windows NT 6.1; en-US; rv:1.9.2.20) Gecko/20190120 Firefox/37.0
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_7_0) AppleWebKit/5332 (KHTML, like Gecko) Chrome/38.0.880.0 Mobile Safari/5332
Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.2; Trident/4.0)
Mozilla/5.0 (Windows NT 6.1; sl-SI; rv:1.9.1.20) Gecko/20210610 Firefox/36.0
Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_2 like Mac OS X; en-US) AppleWebKit/531.30.6 (KHTML, like Gecko) Version/3.0.5 Mobile/8B116 Safari/6531.30.6
Opera/9.17 (Windows 98; sl-SI) Presto/2.9.304 Version/10.00
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_7_5 rv:5.0) Gecko/20200121 Firefox/35.0
Mozilla/5.0 (X11; Linux i686; rv:7.0) Gecko/20200927 Firefox/36.0
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_7 rv:2.0; en-US) AppleWebKit/531.35.7 (KHTML, like Gecko) Version/5.0 Safari/531.35.7
Mozilla/5.0 (X11; Linux i686; rv:6.0) Gecko/20141020 Firefox/36.0
Mozilla/5.0 (Windows; U; Windows 95) AppleWebKit/532.7.1 (KHTML, like Gecko) Version/5.0 Safari/532.7.1
Mozilla/5.0 (X11; Linux x86_64; rv:5.0) Gecko/20140715 Firefox/35.0
Opera/9.21 (Windows NT 6.2; en-US) Presto/2.11.337 Version/12.00
Mozilla/5.0 (Windows 98) AppleWebKit/5321 (KHTML, like Gecko) Chrome/40.0.838.0 Mobile Safari/5321
Opera/8.42 (Windows NT 6.1; en-US) Presto/2.11.208 Version/11.00
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0 rv:6.0; en-US) AppleWebKit/531.49.5 (KHTML, like Gecko) Version/4.0 Safari/531.49.5
Opera/9.77 (X11; Linux i686; sl-SI) Presto/2.8.326 Version/12.00
Mozilla/5.0 (iPhone; CPU iPhone OS 8_0_2 like Mac OS X; en-US) AppleWebKit/533.37.1 (KHTML, like Gecko) Version/3.0.5 Mobile/8B117 Safari/6533.37.1
Mozilla/5.0 (compatible; MSIE 11.0; Windows NT 4.0; Trident/3.0)
Mozilla/5.0 (Windows; U; Windows NT 5.01) AppleWebKit/535.24.3 (KHTML, like Gecko) Version/4.0.3 Safari/535.24.3
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_7_0 rv:6.0; en-US) AppleWebKit/535.37.4 (KHTML, like Gecko) Version/4.0.4 Safari/535.37.4
Mozilla/5.0 (compatible; MSIE 6.0; Windows 98; Win 9x 4.90; Trident/3.0)
Mozilla/5.0 (Windows NT 5.2) AppleWebKit/5332 (KHTML, like Gecko) Chrome/36.0.871.0 Mobile Safari/5332
Mozilla/5.0 (Windows; U; Windows CE) AppleWebKit/531.23.1 (KHTML, like Gecko) Version/4.0 Safari/531.23.1
Mozilla/5.0 (Windows NT 5.2) AppleWebKit/5361 (KHTML, like Gecko) Chrome/40.0.857.0 Mobile Safari/5361
Mozilla/5.0 (compatible; MSIE 5.0; Windows NT 6.2; Trident/5.0)
Opera/9.36 (Windows NT 5.01; en-US) Presto/2.12.355 Version/11.00
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_7_7 rv:2.0; sl-SI) AppleWebKit/534.8.3 (KHTML, like Gecko) Version/5.1 Safari/534.8.3
Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.01; Trident/4.0)
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_0 rv:6.0; sl-SI) AppleWebKit/533.44.5 (KHTML, like Gecko) Version/5.1 Safari/533.44.5