Friday, June 20, 2025
BLOGPOST EARNING ESTIMATOR TOOL
💰 Sponsored Blog Post Earnings Estimator
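As a rough illustration of the arithmetic such an estimator can use, here is a small Python sketch; the per-1,000-view rate and the per-DA-point bonus are assumptions for illustration, not the tool's actual figures.

# Assumed pricing model: flat rate per 1,000 monthly pageviews plus a domain-authority bonus.
def estimate_sponsored_post_rate(monthly_pageviews, domain_authority):
    traffic_component = monthly_pageviews / 1000 * 5   # assumed $5 per 1,000 monthly views
    authority_bonus = domain_authority * 2             # assumed $2 per DA point
    return round(traffic_component + authority_bonus, 2)

print(estimate_sponsored_post_rate(20_000, 35))  # 170.0 with the assumed rates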
Thursday, June 19, 2025
REMOTE JOBS TOOL
import os
import json
import time
import smtplib
import logging
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from pathlib import Path
import requests
from dotenv import load_dotenv
# Optional: for scheduling within Python
import schedule
# Set up basic logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(message)s')
# Load environment variables
load_dotenv()
SMTP_SERVER = os.getenv("SMTP_SERVER")
SMTP_PORT = int(os.getenv("SMTP_PORT", 587))
EMAIL_USER = os.getenv("EMAIL_USER")
EMAIL_PASS = os.getenv("EMAIL_PASS")
RECIPIENT_EMAIL = os.getenv("RECIPIENT_EMAIL")
KEYWORDS = [kw.strip().lower() for kw in os.getenv("KEYWORDS", "").split(",") if kw.strip()]
# File to store IDs of jobs already notified
SEEN_FILE = Path("seen_jobs.json")
def load_seen_jobs():
    if SEEN_FILE.exists():
        try:
            with open(SEEN_FILE, "r") as f:
                data = json.load(f)
                if isinstance(data, list):
                    return set(data)
        except Exception as e:
            logging.warning(f"Could not read seen jobs file: {e}")
    return set()

def save_seen_jobs(seen_set):
    try:
        with open(SEEN_FILE, "w") as f:
            json.dump(list(seen_set), f)
    except Exception as e:
        logging.error(f"Error saving seen jobs file: {e}")

def fetch_jobs():
    """
    Fetch job listings from the RemoteOK JSON API.
    RemoteOK API endpoint: https://remoteok.com/api
    """
    url = "https://remoteok.com/api"  # public endpoint
    headers = {
        "User-Agent": "Mozilla/5.0 (compatible; JobNotifier/1.0)"
    }
    try:
        resp = requests.get(url, headers=headers, timeout=10)
        resp.raise_for_status()
        data = resp.json()
        # The first element may be metadata; keep only dicts that look like jobs
        # (they have both an 'id' and a 'position').
        jobs = []
        for item in data:
            if isinstance(item, dict) and item.get("id") and item.get("position"):
                jobs.append(item)
        logging.info(f"Fetched {len(jobs)} job postings")
        return jobs
    except Exception as e:
        logging.error(f"Error fetching jobs: {e}")
        return []

def filter_jobs(jobs, keywords, seen_ids):
    """
    Return the list of job dicts that match any keyword and are not in seen_ids.
    """
    new_matches = []
    for job in jobs:
        job_id = str(job.get("id"))
        if job_id in seen_ids:
            continue
        text = (job.get("position", "") + " " + job.get("description", "") + " " + job.get("company", "")).lower()
        if any(kw in text for kw in keywords):
            new_matches.append(job)
    logging.info(f"Found {len(new_matches)} new matching jobs")
    return new_matches
def format_email(jobs):
    """
    Create an HTML / plain-text summary of the jobs.
    Returns (subject, plain_body, html_body), or (None, None, None) if there is nothing to report.
    """
    if not jobs:
        return None, None, None
    subject = f"{len(jobs)} new remote job(s) matching your keywords"
    plain_lines = []
    html_lines = [f"<h2>{len(jobs)} new remote job(s) found</h2>", "<ul>"]
    for job in jobs:
        title = job.get("position")
        company = job.get("company")
        url = job.get("url") or job.get("apply_url", "")  # field names may vary
        date = job.get("date") or job.get("date_posted", "")
        snippet = job.get("description", "")[:200].replace("\n", " ").strip()
        plain_lines.append(f"- {title} at {company} ({date}): {url}")
        html_lines.append(
            f"<li><b>{title}</b> at {company} ({date})<br>"
            f"{snippet}...<br>"
            f'<a href="{url}">View Job</a></li>'
        )
    html_lines.append("</ul>")
    return subject, "\n".join(plain_lines), "\n".join(html_lines)
Tuesday, June 17, 2025
FREELANCE JOB FINDER TOOL
// Freelance Job Finder Tool (Frontend + API Fetch)
// This tool allows users to search for remote freelance jobs using a public API like Remotive.
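The finder itself is a front-end page, but the API call behind it is simple. Here is a minimal Python sketch of the same search against the public Remotive endpoint mentioned above; the `search`/`limit` parameters and the `company_name` field follow Remotive's documented response, so treat them as assumptions rather than verified behavior.

# Assumed back-end equivalent of the front-end fetch against Remotive's public API.
import requests

def find_freelance_jobs(search_term, limit=10):
    resp = requests.get(
        "https://remotive.com/api/remote-jobs",
        params={"search": search_term, "limit": limit},
        timeout=10,
    )
    resp.raise_for_status()
    return [
        {"title": j["title"], "company": j["company_name"], "url": j["url"]}
        for j in resp.json().get("jobs", [])
    ]

for job in find_freelance_jobs("python"):
    print(f'{job["title"]} at {job["company"]}: {job["url"]}')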
Wednesday, June 11, 2025
ONLINE EARNING TOOL
import requests
import pandas as pd
from datetime import datetime
class OnlineEarningTool:
    def __init__(self):
        self.upwork_api_key = "YOUR_UPWORK_API_KEY"
        self.amazon_api_key = "YOUR_AMAZON_AFFILIATE_KEY"
        self.fiverr_api_key = "YOUR_FIVERR_API_KEY"  # if available
        self.earnings_data = []

    def fetch_upwork_jobs(self, keyword="Python"):
        """Fetch the latest Upwork jobs matching a keyword."""
        url = f"https://api.upwork.com/api/v3/jobs?q={keyword}"
        headers = {"Authorization": f"Bearer {self.upwork_api_key}"}
        try:
            response = requests.get(url, headers=headers)
            jobs = response.json().get("jobs", [])
            print(f"🔍 Found {len(jobs)} Upwork jobs for '{keyword}'")
            return jobs
        except Exception as e:
            print(f"❌ Upwork API Error: {e}")
            return []

    def auto_apply_upwork(self, jobs, min_pay=50):
        """Auto-apply to Upwork jobs that meet the minimum pay."""
        applied = 0
        for job in jobs:
            if job.get("budget", 0) >= min_pay:
                job_id = job.get("id")
                apply_url = f"https://upwork.com/jobs/{job_id}/apply"
                print(f"✅ Applied to job: {job['title']} (${job['budget']})")
                applied += 1
        print(f"📨 Applied to {applied} jobs.")
        return applied

    def track_amazon_affiliate_earnings(self):
        """Check Amazon Affiliate earnings via API."""
        url = "https://affiliate-api.amazon.com/report"
        params = {
            "api_key": self.amazon_api_key,
            "report_type": "earnings"
        }
        try:
            response = requests.get(url, params=params)
            earnings = response.json().get("earnings", 0)
            print(f"💰 Amazon Affiliate Earnings: ${earnings:.2f}")
            return earnings
        except Exception as e:
            print(f"❌ Amazon API Error: {e}")
            return 0

    def analyze_earnings(self):
        """Store and analyze earnings over time."""
        df = pd.DataFrame(self.earnings_data)
        if not df.empty:
            df.to_csv("earnings_history.csv", index=False)
            print("📊 Earnings data saved to 'earnings_history.csv'")
            print(df.tail())
        else:
            print("No earnings data yet.")

    def run(self):
        """Main automation loop."""
        print("\n🚀 Starting Online Earning Automation...")
        # 1. Fetch & apply to Upwork jobs
        jobs = self.fetch_upwork_jobs("Python")
        self.auto_apply_upwork(jobs, min_pay=30)
        # 2. Track Amazon Affiliate earnings
        amazon_earnings = self.track_amazon_affiliate_earnings()
        self.earnings_data.append({
            "date": datetime.now().strftime("%Y-%m-%d"),
            "source": "Amazon Affiliate",
            "amount": amazon_earnings
        })
        # 3. Analyze & save the data
        self.analyze_earnings()

if __name__ == "__main__":
    tool = OnlineEarningTool()
    tool.run()
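The placeholder API keys above are easiest to keep out of the source file. Here is a small sketch of loading them from a .env file, reusing the python-dotenv pattern from the Remote Jobs tool above; the variable names are assumptions, not part of the original class.

# Assumed environment-variable names; mirrors the python-dotenv pattern used earlier on this page.
import os
from dotenv import load_dotenv

load_dotenv()

class OnlineEarningToolFromEnv(OnlineEarningTool):
    def __init__(self):
        super().__init__()
        self.upwork_api_key = os.getenv("UPWORK_API_KEY", "")
        self.amazon_api_key = os.getenv("AMAZON_AFFILIATE_KEY", "")
        self.fiverr_api_key = os.getenv("FIVERR_API_KEY", "")

# Usage: tool = OnlineEarningToolFromEnv(); tool.run()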
Monday, June 9, 2025
SYSTEM INFORMATION CHECKER TOOL (full Python script)
import platform
import socket
import psutil
import datetime
import shutil
import os
def get_size(bytes, suffix="B"):
    """Scale bytes to a human-readable format, e.g. KB, MB, GB."""
    factor = 1024
    for unit in ["", "K", "M", "G", "T", "P"]:
        if bytes < factor:
            return f"{bytes:.2f} {unit}{suffix}"
        bytes /= factor

def system_info():
    print("="*40, "System Information", "="*40)
    print(f"System: {platform.system()}")
    print(f"Node Name: {platform.node()}")
    print(f"Release: {platform.release()}")
    print(f"Version: {platform.version()}")
    print(f"Machine: {platform.machine()}")
    print(f"Processor: {platform.processor()}")
    print(f"Architecture: {' '.join(platform.architecture())}")
    print("="*100)

def cpu_info():
    print("="*40, "CPU Info", "="*40)
    print(f"Physical cores: {psutil.cpu_count(logical=False)}")
    print(f"Total cores: {psutil.cpu_count(logical=True)}")
    print(f"Max Frequency: {psutil.cpu_freq().max:.2f} MHz")
    print(f"Min Frequency: {psutil.cpu_freq().min:.2f} MHz")
    print(f"Current Frequency: {psutil.cpu_freq().current:.2f} MHz")
    print("="*100)

def memory_info():
    print("="*40, "Memory Info", "="*40)
    svmem = psutil.virtual_memory()
    print(f"Total: {get_size(svmem.total)}")
    print(f"Available: {get_size(svmem.available)}")
    print(f"Used: {get_size(svmem.used)}")
    print(f"Percentage: {svmem.percent}%")
    print("="*100)

def disk_info():
    print("="*40, "Disk Info", "="*40)
    total, used, free = shutil.disk_usage("/")
    print(f"Total: {get_size(total)}")
    print(f"Used: {get_size(used)}")
    print(f"Free: {get_size(free)}")
    print("="*100)

def network_info():
    print("="*40, "Network Info", "="*40)
    hostname = socket.gethostname()
    ip_address = socket.gethostbyname(hostname)
    print(f"Hostname: {hostname}")
    print(f"IP Address: {ip_address}")
    print("="*100)

def uptime_info():
    print("="*40, "Uptime", "="*40)
    boot_time_timestamp = psutil.boot_time()
    bt = datetime.datetime.fromtimestamp(boot_time_timestamp)
    print(f"Boot Time: {bt.strftime('%Y-%m-%d %H:%M:%S')}")
    uptime = datetime.datetime.now() - bt
    print(f"Uptime: {str(uptime).split('.')[0]}")
    print("="*100)

if __name__ == "__main__":
    os.system('cls' if os.name == 'nt' else 'clear')
    system_info()
    cpu_info()
    memory_info()
    disk_info()
    network_info()
    uptime_info()
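One caveat: socket.gethostbyname(hostname) can return 127.0.0.1 on systems whose hostname maps to the loopback entry in /etc/hosts. A common workaround, shown here as a sketch rather than part of the original script, is to let the OS pick the outbound interface via a throwaway UDP socket:

# Sketch of a more reliable LAN-IP lookup; the UDP socket never sends any data,
# connecting it just forces the OS to choose an outbound interface.
import socket

def get_lan_ip():
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("8.8.8.8", 80))   # any routable address works; nothing is transmitted
        return s.getsockname()[0]
    except OSError:
        return "127.0.0.1"
    finally:
        s.close()

print(f"LAN IP Address: {get_lan_ip()}")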
Thursday, June 5, 2025
YOUTUBE REVENUE ESTIMATOR TOOL
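As a rough sketch of the arithmetic such an estimator typically performs (the CPM, monetized-view share, and 55% creator split below are assumed defaults, not the tool's exact figures):

# Assumed defaults: $4 CPM, 60% of views monetized, 55% creator revenue share.
def estimate_youtube_revenue(views, cpm=4.0, monetized_share=0.6, creator_share=0.55):
    monetized_views = views * monetized_share
    gross_ad_revenue = monetized_views / 1000 * cpm
    return round(gross_ad_revenue * creator_share, 2)

print(estimate_youtube_revenue(100_000))  # 132.0 with the assumed defaults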
Tuesday, June 3, 2025
AFFILIATE LINK GENERATOR TOOL
# Affiliate Link Generator Tool
# This script helps content creators generate affiliate links automatically.
from urllib.parse import urlencode
def generate_affiliate_link(base_url, affiliate_id, product_id=None, campaign=None):
    """
    Generates an affiliate URL with optional campaign and product tracking.
    :param base_url: The base URL of the product or store.
    :param affiliate_id: The unique affiliate ID provided by the affiliate program.
    :param product_id: Optional product ID.
    :param campaign: Optional campaign identifier.
    :return: A full affiliate tracking URL.
    """
    params = {
        'ref': affiliate_id
    }
    if product_id:
        params['product'] = product_id
    if campaign:
        params['campaign'] = campaign
    query_string = urlencode(params)
    affiliate_url = f"{base_url}?{query_string}"
    return affiliate_url

# Example usage
if __name__ == "__main__":
    base = input("Enter the base product URL: ")
    aff_id = input("Enter your affiliate ID: ")
    prod_id = input("Enter product ID (optional): ") or None
    camp = input("Enter campaign name (optional): ") or None
    link = generate_affiliate_link(base, aff_id, prod_id, camp)
    print("\nGenerated Affiliate Link:")
    print(link)
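For example, with a hypothetical product URL the inputs map onto the query string like this:

link = generate_affiliate_link(
    "https://example.com/product", "aff123", product_id="sku42", campaign="summer"
)
print(link)
# https://example.com/product?ref=aff123&product=sku42&campaign=summer

Note that the function assumes base_url carries no existing query string; if it might, merging parameters with urllib.parse.urlsplit and urlunsplit is the safer route.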