Compare commits: 2e439a7d4e...96c60e2441

No commits in common. "2e439a7d4e5ba759424dbbafab8859b0969c6851" and "96c60e2441ffa363a536d20377411c7146b08286" have entirely different histories.
@@ -1 +0,0 @@
# Put your custom themes in this folder.

@@ -2,6 +2,10 @@
# insecure ownership or permissions) by:
#
# * Human-readably notifying the user of these insecurities.
# * Moving away all existing completion caches to a temporary directory. Since
# any of these caches may have been generated from insecure directories, they
# are all suspect now. Failing to do so typically causes subsequent compinit()
# calls to fail with "command not found: compdef" errors. (That's bad.)
function handle_completion_insecurities() {
# List of the absolute paths of all unique insecure directories, split on
# newline from compaudit()'s output resembling:

@@ -18,27 +22,39 @@ function handle_completion_insecurities() {
insecure_dirs=( ${(f@):-"$(compaudit 2>/dev/null)"} )

# If no such directories exist, get us out of here.
(( ! ${#insecure_dirs} )) && return
if (( ! ${#insecure_dirs} )); then
print "[oh-my-zsh] No insecure completion-dependent directories detected."
return
fi

# List ownership and permissions of all insecure directories.
print "[oh-my-zsh] Insecure completion-dependent directories detected:"
ls -ld "${(@)insecure_dirs}"
print "[oh-my-zsh] For safety, completions will be disabled until you manually fix all"
print "[oh-my-zsh] insecure directory permissions and ownership and restart oh-my-zsh."
print "[oh-my-zsh] See the above list for directories with group or other writability.\n"

cat <<EOD
# Locally enable the "NULL_GLOB" option, thus removing unmatched filename
# globs from argument lists *AND* printing no warning when doing so. Failing
# to do so prints an unreadable warning if no completion caches exist below.
setopt local_options null_glob

[oh-my-zsh] For safety, we will not load completions from these directories until
[oh-my-zsh] you fix their permissions and ownership and restart zsh.
[oh-my-zsh] See the above list for directories with group or other writability.
# List of the absolute paths of all unique existing completion caches.
local -aU zcompdump_files
zcompdump_files=( "${ZSH_COMPDUMP}"(.) "${ZDOTDIR:-${HOME}}"/.zcompdump* )

[oh-my-zsh] To fix your permissions you can do so by disabling
[oh-my-zsh] the write permission of "group" and "others" and making sure that the
[oh-my-zsh] owner of these directories is either root or your current user.
[oh-my-zsh] The following command may help:
[oh-my-zsh] compaudit | xargs chmod g-w,o-w
# Move such caches to a temporary directory.
if (( ${#zcompdump_files} )); then
# Absolute path of the directory to which such files will be moved.
local ZSH_ZCOMPDUMP_BAD_DIR="${ZSH_CACHE_DIR}/zcompdump-bad"

[oh-my-zsh] If the above didn't help or you want to skip the verification of
[oh-my-zsh] insecure directories you can set the variable ZSH_DISABLE_COMPFIX to
[oh-my-zsh] "true" before oh-my-zsh is sourced in your zshrc file.
# List such files first.
print "[oh-my-zsh] Insecure completion caches also detected:"
ls -l "${(@)zcompdump_files}"

EOD
# For safety, move rather than permanently remove such files.
print "[oh-my-zsh] Moving to \"${ZSH_ZCOMPDUMP_BAD_DIR}/\"...\n"
mkdir -p "${ZSH_ZCOMPDUMP_BAD_DIR}"
mv "${(@)zcompdump_files}" "${ZSH_ZCOMPDUMP_BAD_DIR}/"
fi
}
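The messages added above describe an escape hatch: the whole compaudit check can be skipped by setting a variable before oh-my-zsh is sourced. A minimal, illustrative sketch of a user's ~/.zshrc (only the variable name comes from this diff; the surrounding lines are assumptions):

```zsh
# ~/.zshrc — ordering is what matters here
ZSH_DISABLE_COMPFIX=true      # skip the insecure-directory check; must be set before sourcing
source "$ZSH/oh-my-zsh.sh"
```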
@@ -25,9 +25,6 @@ else
fi
unset CASE_SENSITIVE HYPHEN_INSENSITIVE

# Complete . and .. special directories
zstyle ':completion:*' special-dirs true

zstyle ':completion:*' list-colors ''
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([0-9]#) ([0-9a-z-]#)*=01;34=0=01'

@@ -28,3 +28,7 @@ alias lsa='ls -lah'
alias l='ls -lah'
alias ll='ls -lh'
alias la='ls -lAh'

# Push and pop directories on directory stack
alias pu='pushd'
alias po='popd'
@@ -25,9 +25,7 @@ function open_command() {
case "$OSTYPE" in
darwin*) open_cmd='open' ;;
cygwin*) open_cmd='cygstart' ;;
linux*) [[ $(uname -a) =~ "Microsoft" ]] && \
open_cmd='cmd.exe /c start' || \
open_cmd='xdg-open' ;;
linux*) open_cmd='xdg-open' ;;
msys*) open_cmd='start ""' ;;
*) echo "Platform $OSTYPE not supported"
return 1
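The hunk above teaches open_command to detect WSL via `uname -a`. For context, a hedged usage sketch of the helper (the arguments are examples, not taken from the diff):

```zsh
open_command https://github.com/ohmyzsh   # uses open/xdg-open/cmd.exe depending on platform
open_command ./notes.pdf
```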
@@ -1,41 +1,24 @@
## History wrapper
function omz_history {
# Delete the history file if `-c' argument provided.
# This won't affect the `history' command output until the next login.
zparseopts -E c=clear l=list
## Command history configuration
if [ -z "$HISTFILE" ]; then
HISTFILE=$HOME/.zsh_history
fi

if [[ -n "$clear" ]]; then
# if -c provided, clobber the history file
echo -n >| "$HISTFILE"
echo >&2 History file deleted. Reload the session to see its effects.
elif [[ -n "$list" ]]; then
# if -l provided, run as if calling `fc' directly
builtin fc "$@"
else
# otherwise, call `fc -l 1` to show all available
# history (and pass additional parameters)
builtin fc "$@" -l 1
fi
}

# Timestamp format
case $HIST_STAMPS in
"mm/dd/yyyy") alias history='omz_history -f' ;;
"dd.mm.yyyy") alias history='omz_history -E' ;;
"yyyy-mm-dd") alias history='omz_history -i' ;;
*) alias history='omz_history' ;;
esac

## History file configuration
[ -z "$HISTFILE" ] && HISTFILE="$HOME/.zsh_history"
HISTSIZE=50000
HISTSIZE=10000
SAVEHIST=10000

## History command configuration
setopt extended_history       # record timestamp of command in HISTFILE
setopt hist_expire_dups_first # delete duplicates first when HISTFILE size exceeds HISTSIZE
setopt hist_ignore_dups       # ignore duplicated commands history list
setopt hist_ignore_space      # ignore commands that start with space
setopt hist_verify            # show command with history expansion to user before running it
setopt inc_append_history     # add commands to HISTFILE in order of execution
setopt share_history          # share command history data
# Show history
case $HIST_STAMPS in
"mm/dd/yyyy") alias history='fc -fl 1' ;;
"dd.mm.yyyy") alias history='fc -El 1' ;;
"yyyy-mm-dd") alias history='fc -il 1' ;;
*) alias history='fc -l 1' ;;
esac

setopt append_history
setopt extended_history
setopt hist_expire_dups_first
setopt hist_ignore_dups # ignore duplication command history list
setopt hist_ignore_space
setopt hist_verify
setopt inc_append_history
setopt share_history # share command history data
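Since the new wrapper is what the history alias resolves to, the flags documented in its comments drive everyday use. An illustrative sketch (behaviour as described in the hunk above):

```zsh
history            # list everything, via `fc -l 1`
history | grep ssh # filter past commands
history -c         # clobber $HISTFILE; takes effect at the next login
```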
oh-my-zsh.sh (13 lines changed)

@@ -11,6 +11,8 @@ fpath=($ZSH/functions $ZSH/completions $fpath)
# Load all stock functions (from $fpath files) called below.
autoload -U compaudit compinit

: ${ZSH_DISABLE_COMPFIX:=true}

# Set ZSH_CUSTOM to the path where your custom config files
# and plugins exists, or else we will use the default custom/
if [[ -z "$ZSH_CUSTOM" ]]; then

@@ -63,15 +65,16 @@ if [ -z "$ZSH_COMPDUMP" ]; then
fi

if [[ $ZSH_DISABLE_COMPFIX != true ]]; then
# If completion insecurities exist, warn the user
# If completion insecurities exist, warn the user without enabling completions.
if ! compaudit &>/dev/null; then
# This function resides in the "lib/compfix.zsh" script sourced above.
handle_completion_insecurities
# Else, enable and cache completions to the desired file.
else
compinit -d "${ZSH_COMPDUMP}"
fi
# Load only from secure directories
compinit -i -d "${ZSH_COMPDUMP}"
else
# If the user wants it, load from all found directories
compinit -u -d "${ZSH_COMPDUMP}"
compinit -i -d "${ZSH_COMPDUMP}"
fi

# Load all of the plugins that were defined in ~/.zshrc
@@ -5,8 +5,3 @@ ASDF_DIR="${ASDF_DIR:-$HOME/.asdf}"
if [ -f $ASDF_DIR/asdf.sh ]; then
. $ASDF_DIR/asdf.sh
fi

# Load asdf completions, if found.
if [ -f $ASDF_DIR/completions/asdf.bash ]; then
. $ASDF_DIR/completions/asdf.bash
fi
@@ -37,15 +37,16 @@ function asp {
}

function aws_profiles {
reply=($(grep profile $AWS_HOME/config|sed -e 's/.*profile \([a-zA-Z0-9_\.-]*\).*/\1/'))
reply=($(grep profile $AWS_HOME/config|sed -e 's/.*profile \([a-zA-Z0-9_-]*\).*/\1/'))
}

compctl -K aws_profiles asp

if which aws_zsh_completer.sh &>/dev/null; then
_aws_zsh_completer_path=$(which aws_zsh_completer.sh 2>/dev/null)
elif _homebrew-installed && _awscli-homebrew-installed; then
if _homebrew-installed && _awscli-homebrew-installed ; then
_aws_zsh_completer_path=$_brew_prefix/libexec/bin/aws_zsh_completer.sh
else
_aws_zsh_completer_path=$(which aws_zsh_completer.sh)
fi

[ -n "$_aws_zsh_completer_path" ] && [ -x $_aws_zsh_completer_path ] && source $_aws_zsh_completer_path
[ -x $_aws_zsh_completer_path ] && source $_aws_zsh_completer_path
unset _aws_zsh_completer_path
plugins/chucknorris/LICENSE (new file, 2 lines)

@@ -0,0 +1,2 @@
License: GPL v2
Thanks to http://www.k-lug.org/~kessler/projects.html for the fortune file.

(File diff suppressed because it is too large.)
@@ -1,10 +0,0 @@
# copydir plugin

Copies the path of your current folder to the system clipboard.

To use, add `copydir` to your plugins array:
```
plugins=(... copydir)
```

Then use the command `copydir` to copy the $PWD.
@@ -1,10 +0,0 @@
# copyfile plugin

Puts the contents of a file in your system clipboard so you can paste it anywhere.

To use, add `copyfile` to your plugins array:
```
plugins=(... copyfile)
```

Then you can run the command `copyfile <filename>` to copy the file named `filename`.
@@ -2,10 +2,6 @@
# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories
# that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT.
#
# Navigate directory hierarchy using ALT-UP and ALT-DOWN. (mac keybindings not yet implemented)
# ALT-UP moves to higher hierarchy (cd ..)
# ALT-DOWN moves into the first directory found in alphabetical order
#

dirhistory_past=($PWD)
dirhistory_future=()

@@ -124,9 +120,7 @@ zle -N dirhistory_zle_dirhistory_back
bindkey "\e[3D" dirhistory_zle_dirhistory_back
bindkey "\e[1;3D" dirhistory_zle_dirhistory_back
# Mac teminal (alt+left/right)
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
bindkey "^[b" dirhistory_zle_dirhistory_back
fi
bindkey "^[b" dirhistory_zle_dirhistory_back
# Putty:
bindkey "\e\e[D" dirhistory_zle_dirhistory_back
# GNU screen:

@@ -135,56 +129,8 @@ bindkey "\eO3D" dirhistory_zle_dirhistory_back
zle -N dirhistory_zle_dirhistory_future
bindkey "\e[3C" dirhistory_zle_dirhistory_future
bindkey "\e[1;3C" dirhistory_zle_dirhistory_future
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
bindkey "^[f" dirhistory_zle_dirhistory_future
fi
bindkey "^[f" dirhistory_zle_dirhistory_future
bindkey "\e\e[C" dirhistory_zle_dirhistory_future
bindkey "\eO3C" dirhistory_zle_dirhistory_future


#
# HIERARCHY Implemented in this section, in case someone wants to split it to another plugin if it clashes bindings
#

# Move up in hierarchy
function dirhistory_up() {
cd .. || return 1
}

# Move down in hierarchy
function dirhistory_down() {
cd "$(find . -mindepth 1 -maxdepth 1 -type d | sort -n | head -n 1)" || return 1
}

# Bind keys to hierarchy navigation
function dirhistory_zle_dirhistory_up() {
zle kill-buffer # Erase current line in buffer
dirhistory_up
zle accept-line
}

function dirhistory_zle_dirhistory_down() {
zle kill-buffer # Erase current line in buffer
dirhistory_down
zle accept-line
}

zle -N dirhistory_zle_dirhistory_up
# xterm in normal mode
bindkey "\e[3A" dirhistory_zle_dirhistory_up
bindkey "\e[1;3A" dirhistory_zle_dirhistory_up
# Mac teminal (alt+up)
#bindkey "^[?" dirhistory_zle_dirhistory_up #dont know it
# Putty:
bindkey "\e\e[A" dirhistory_zle_dirhistory_up
# GNU screen:
bindkey "\eO3A" dirhistory_zle_dirhistory_up

zle -N dirhistory_zle_dirhistory_down
bindkey "\e[3B" dirhistory_zle_dirhistory_down
bindkey "\e[1;3B" dirhistory_zle_dirhistory_down
# Mac teminal (alt+down)
#bindkey "^[?" dirhistory_zle_dirhistory_down #dont know it
bindkey "\e\e[B" dirhistory_zle_dirhistory_down
bindkey "\eO3B" dirhistory_zle_dirhistory_down
@@ -450,9 +450,9 @@ __docker_complete_events_filter() {
;;
(event)
local -a event_opts
event_opts=('attach' 'commit' 'connect' 'copy' 'create' 'delete' 'destroy' 'detach' 'die' 'disable' 'disconnect' 'enable' 'exec_create' 'exec_detach'
'exec_start' 'export' 'health_status' 'import' 'install' 'kill' 'load' 'mount' 'oom' 'pause' 'pull' 'push' 'reload' 'remove' 'rename' 'resize'
'restart' 'save' 'start' 'stop' 'tag' 'top' 'unmount' 'unpause' 'untag' 'update')
event_opts=('attach' 'commit' 'connect' 'copy' 'create' 'delete' 'destroy' 'detach' 'die' 'disconnect' 'exec_create' 'exec_detach'
'exec_start' 'export' 'health_status' 'import' 'kill' 'load' 'mount' 'oom' 'pause' 'pull' 'push' 'reload' 'rename' 'resize' 'restart' 'save' 'start'
'stop' 'tag' 'top' 'unmount' 'unpause' 'untag' 'update')
_describe -t event-filter-opts "event filter options" event_opts && ret=0
;;
(image)

@@ -889,7 +889,7 @@ __docker_container_subcommand() {
$opts_help \
$opts_attach_exec_run_start \
"($help -a --attach)"{-a,--attach}"[Attach container's stdout/stderr and forward all signals]" \
"($help -i --interactive)"{-i,--interactive}"[Attach container's stdin]" \
"($help -i --interactive)"{-i,--interactive}"[Attach container's stding]" \
"($help -)*:containers:__docker_complete_stopped_containers" && ret=0
;;
(stats)
@@ -10,7 +10,7 @@
# - Configuration changes made at runtime are applied to all frames.

if "$ZSH/tools/require_tool.sh" emacsclient 24 2>/dev/null ; then
if "$ZSH/tools/require_tool.sh" emacs 24 2>/dev/null ; then
export EMACS_PLUGIN_LAUNCHER="$ZSH/plugins/emacs/emacsclient.sh"

# set EDITOR if not already defined.
@@ -6,6 +6,6 @@ if [ $commands[fasd] ]; then # check if fasd is installed
source "$fasd_cache"
unset fasd_cache

alias v="f -e \"$EDITOR\""
alias v="f -e $EDITOR"
alias o='a -e open_command'
fi
plugins/fedora/README.md (new file, 3 lines)

@@ -0,0 +1,3 @@
This is a plugin based on yum plugin, but using dnf as main frontend
(from Fedora 22 onwards, yum is deprecated in favor of dnf).

@@ -1 +0,0 @@
../dnf/dnf.plugin.zsh
plugins/fedora/fedora.plugin.zsh (new file, 16 lines)

@@ -0,0 +1,16 @@
## Aliases

alias dnfs="dnf search" # search package
alias dnfp="dnf info" # show package info
alias dnfl="dnf list" # list packages
alias dnfgl="dnf grouplist" # list package groups
alias dnfli="dnf list installed" # print all installed packages
alias dnfmc="dnf makecache" # rebuilds the dnf package list

alias dnfu="sudo dnf upgrade" # upgrade packages
alias dnfi="sudo dnf install" # install package
alias dnfgi="sudo dnf groupinstall" # install package group
alias dnfr="sudo dnf remove" # remove package
alias dnfgr="sudo dnf groupremove" # remove pagage group
alias dnfrl="sudo dnf remove --remove-leaves" # remove package and leaves
alias dnfc="sudo dnf clean all" # clean cache
|
||||
status = [(line[0], line[1], line[2:]) for line in stdout.decode('utf-8').splitlines()]
|
||||
for st in status:
|
||||
if st[0] == '#' and st[1] == '#':
|
||||
if re.search('Initial commit on', st[2]) or re.search('No commits yet on', st[2]):
|
||||
if re.search('Initial commit on', st[2]):
|
||||
branch = st[2].split(' ')[-1]
|
||||
elif re.search('no branch', st[2]): # detached status
|
||||
branch = get_tagname_or_hash()
|
||||
|
@@ -180,10 +180,8 @@ alias glgg='git log --graph'
alias glgga='git log --graph --decorate --all'
alias glgm='git log --graph --max-count=10'
alias glo='git log --oneline --decorate'
alias glol="git log --graph --pretty='%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset'"
alias glod="git log --graph --pretty='%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%ad) %C(bold blue)<%an>%Creset'"
alias glods="git log --graph --pretty='%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%ad) %C(bold blue)<%an>%Creset' --date=short"
alias glola="git log --graph --pretty='%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --all"
alias glol="git log --graph --pretty='%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --abbrev-commit"
alias glola="git log --graph --pretty='%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --abbrev-commit --all"
alias glog='git log --oneline --decorate --graph'
alias gloga='git log --oneline --decorate --graph --all'
alias glp="_git_log_prettily"
@@ -89,7 +89,6 @@ __hub_setup_zsh_fns () {
browse:'browse the project on GitHub'
compare:'open GitHub compare view'
ci-status:'lookup commit in GitHub Status API'
sync:'update local branches from upstream'
)
_describe -t hub-commands 'hub command' hub_commands && ret=0

@@ -116,7 +115,6 @@ create
browse
compare
ci-status
sync
EOF
__git_list_all_commands_without_hub
}
@@ -1,6 +1,8 @@
# Set up hub wrapper for git, if it is available; http://github.com/github/hub
if (( $+commands[hub] )); then
alias git=hub
if [ "$commands[(I)hub]" ]; then
if hub --version &>/dev/null; then
eval $(hub alias -s zsh)
fi
fi

# Functions #################################################################
@@ -1,7 +1,7 @@
function gi() { curl -fL https://www.gitignore.io/api/${(j:,:)@} }
function gi() { curl -sL https://www.gitignore.io/api/${(j:,:)@} }

_gitignoreio_get_command_list() {
curl -fL https://www.gitignore.io/api/list | tr "," "\n"
curl -sL https://www.gitignore.io/api/list | tr "," "\n"
}

_gitignoreio () {
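In both versions gi prints a generated .gitignore to stdout, joining its arguments with commas into the gitignore.io API path. An illustrative call (the template names are examples):

```zsh
gi python node >> .gitignore   # requests the python,node templates and appends the result
```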
@@ -1,16 +1,14 @@
# Enable gpg-agent if it is not running-
# --use-standard-socket will work from version 2 upwards

AGENT_SOCK=$(gpgconf --list-dirs | grep agent-socket | cut -d : -f 2)

if [[ ! -S $AGENT_SOCK ]]; then
gpg-agent --daemon --use-standard-socket &>/dev/null
# Enable gpg-agent if it is not running
GPG_AGENT_SOCKET="${XDG_RUNTIME_DIR}/gnupg/S.gpg-agent.ssh"
if [ ! -S $GPG_AGENT_SOCKET ]; then
gpg-agent --daemon >/dev/null 2>&1
export GPG_TTY=$(tty)
fi
export GPG_TTY=$TTY

# Set SSH to use gpg-agent if it's enabled
# Set SSH to use gpg-agent if it is configured to do so
GNUPGCONFIG="${GNUPGHOME:-"$HOME/.gnupg"}/gpg-agent.conf"
if [[ -r $GNUPGCONFIG ]] && command grep -q enable-ssh-support "$GNUPGCONFIG"; then
export SSH_AUTH_SOCK="$AGENT_SOCK.ssh"
if [ -r "$GNUPGCONFIG" ] && grep -q enable-ssh-support "$GNUPGCONFIG"; then
unset SSH_AGENT_PID
export SSH_AUTH_SOCK=$GPG_AGENT_SOCKET
fi
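Both the old and new plugin code only route SSH through gpg-agent when enable-ssh-support appears in gpg-agent.conf. A hedged sketch of opting in (the config path follows the $GNUPGHOME fallback used above):

```zsh
echo "enable-ssh-support" >> "${GNUPGHOME:-$HOME/.gnupg}/gpg-agent.conf"
gpgconf --kill gpg-agent   # restart the agent so the new setting takes effect
```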
@@ -88,7 +88,7 @@ function _gradle_arguments() {
# and if so, regenerate the .gradle_tasks cache file
############################################################################
_gradle_does_task_list_need_generating () {
[[ ! -f .gradletasknamecache ]] || [[ build.gradle -nt .gradletasknamecache || build.gradle.kts -nt .gradletasknamecache ]]
[[ ! -f .gradletasknamecache ]] || [[ build.gradle -nt .gradletasknamecache ]]
}

##############

@@ -144,7 +144,7 @@ _gradle_parse_and_extract_tasks () {
# Discover the gradle tasks by running "gradle tasks --all"
############################################################################
_gradle_tasks () {
if [[ -f build.gradle || -f build.gradle.kts ]]; then
if [[ -f build.gradle ]]; then
_gradle_arguments
if _gradle_does_task_list_need_generating; then
_gradle_parse_and_extract_tasks "$(gradle tasks --all)" > .gradletasknamecache

@@ -154,7 +154,7 @@ _gradle_tasks () {
}

_gradlew_tasks () {
if [[ -f build.gradle || -f build.gradle.kts ]]; then
if [[ -f build.gradle ]]; then
_gradle_arguments
if _gradle_does_task_list_need_generating; then
_gradle_parse_and_extract_tasks "$(./gradlew tasks --all)" > .gradletasknamecache
@@ -45,18 +45,6 @@ _1st_arguments=(
"logs\:drains":"manage syslog drains"
"maintenance\:on":"put the app into maintenance mode"
"maintenance\:off":"take the app out of maintenance mode"
"pipelines":"list pipelines you have access to"
"pipelines\:add":"add this app to a pipeline"
"pipelines\:create":"create a new pipeline"
"pipelines\:destroy":"destroy a pipeline"
"pipelines\:diff":"compares the latest release of this app to its downstream app(s)"
"pipelines\:info":"show list of apps in a pipeline"
"pipelines\:list":"list pipelines you have access to"
"pipelines\:open":"open a pipeline in dashboard"
"pipelines\:promote":"promote the latest release of this app to its downstream app(s)"
"pipelines\:remove":"remove this app from its pipeline"
"pipelines\:rename":"rename a pipeline"
"pipelines\:update":"update this app's stage in a pipeline"
"pg\:credentials":"display the DATABASE credentials"
"pg\:diagnose":"run diagnostics report on DATABASE"
"pg\:info":"display database information"

@@ -72,7 +60,6 @@ _1st_arguments=(
"pg\:unfollow":"stop a replica from following and make it a read/write database"
"pg\:upgrade":"unfollow a database and upgrade it to the latest PostgreSQL version"
"pg\:wait":"monitor database creation, exit when complete"
"pg\:backups":"Interact with built-in backups"
"pgbackups":"list captured backups"
"pgbackups\:url":"get a temporary URL for a backup"
"pgbackups\:capture":"capture a backup from a database id"

@@ -144,41 +131,6 @@ case "$words[1]" in
'(-t|--tail)'{-t,--tail}'[continually stream logs]' \
)
;;
pipelines)
_command_args=(
'(--json)'--json'[output in json format]' \
)
;;
pipelines:add)
_command_args=(
'(-s|--stage)'{-s,--stage}'[stage of first app in pipeline]' \
)
;;
pipelines:create)
_command_args=(
'(-s|--stage)'{-s,--stage}'[stage of first app in pipeline]' \
)
;;
pipelines:info)
_command_args=(
'(--json)'--json'[output in json format]' \
)
;;
pipelines:list)
_command_args=(
'(--json)'--json'[output in json format]' \
)
;;
pipelines:promote)
_command_args=(
'(-t|--to)'{-t,--to}'[comma separated list of apps to promote to]' \
)
;;
pipelines:update)
_command_args=(
'(-s|--stage)'{-s,--stage}'[stage of first app in pipeline]' \
)
;;
pgbackups:capture)
_command_args=(
'(-e|--expire)'{-e,--expire}'[if no slots are available to capture, delete the oldest backup to make room]' \
@@ -1,13 +1,9 @@
if (( $+commands[kubectl] )); then
__KUBECTL_COMPLETION_FILE="${ZSH_CACHE_DIR}/kubectl_completion"
# Autocompletion for kubectl, the command line interface for Kubernetes
#
# Author: https://github.com/pstadler

if [[ ! -f $__KUBECTL_COMPLETION_FILE ]]; then
kubectl completion zsh >! $__KUBECTL_COMPLETION_FILE
fi

[[ -f $__KUBECTL_COMPLETION_FILE ]] && source $__KUBECTL_COMPLETION_FILE

unset __KUBECTL_COMPLETION_FILE
if [ $commands[kubectl] ]; then
source <(kubectl completion zsh)
fi

# This command is used ALOT both below and in daily life

@@ -37,12 +33,6 @@ alias kes='k edit svc'
alias kds='k describe svc'
alias kdels='k delete svc'

# Ingress management
alias kgi='k get ingress'
alias kei='k edit ingress'
alias kdi='k describe ingress'
alias kdeli='k delete ingress'

# Secret management
alias kgsec='k get secret'
alias kdsec='k describe secret'
@@ -4,10 +4,8 @@ typeset -g ZSH_LAST_WORKING_DIRECTORY
# Updates the last directory once directory is changed
chpwd_functions+=(chpwd_last_working_dir)
chpwd_last_working_dir() {
if [ "$ZSH_SUBSHELL" = 0 ]; then
local cache_file="$ZSH_CACHE_DIR/last-working-dir"
pwd >| "$cache_file"
fi
local cache_file="$ZSH_CACHE_DIR/last-working-dir"
pwd >| "$cache_file"
}

# Changes directory to the last working directory
@@ -1,6 +0,0 @@
# Autocompletion for Minikube.
#

if [ $commands[minikube] ]; then
source <(minikube completion zsh)
fi
@@ -12,7 +12,7 @@ _mix_does_task_list_need_generating () {
}

_mix_generate () {
mix help | grep -v 'iex -S' | tail -n +2 | cut -d " " -f 2 > .mix_tasks
mix --help | grep -v 'iex -S' | tail -n +2 | cut -d " " -f 2 > .mix_tasks
}

_mix () {
@@ -21,15 +21,6 @@ _1st_arguments=(
'deps.unlock:Unlock the given dependencies'
'deps.update:Update the given dependencies'
'do:Executes the tasks separated by comma'
'ecto.create:Create Ecto database'
'ecto.drop:Drop the storage for the given repository'
'ecto.dump:Dumps the current environment’s database structure'
'ecto.gen.migration:Generates a migration'
'ecto.gen.repo:Generates a new repository'
'ecto.load:Loads the current environment’s database structure'
'ecto.migrate:Runs Ecto migration'
'ecto.migrations:Displays the up / down migration status'
'ecto.rollback:Reverts applied migrations'
'escript.build:Builds an escript for the project'
'help:Print help information for tasks'
'hex:Print hex help information'
@@ -1,26 +0,0 @@
## npm plugin

The npm plugin provides completion as well as adding many useful aliases.

To use it, add npm to the plugins array of your zshrc file:
```
plugins=(... npm)
```

## Aliases

| Alias | Command | Descripton |
|:------ |:-----------------------------|:----------------------------------------------------------------|
| `npmg` | `npm i -g` | Install dependencies globally |
| `npmS` | `npm i -S` | Install and save to dependencies in your package.json |
| `npmD` | `npm i -D` | Install and save to dev-dependencies in your package.json |
| `npmE` | `PATH="$(npm bin)":"$PATH"` | Run command from node_modules folder based on current directory |
| `npmO` | `npm outdated` | Check which npm modules are outdated |
| `npmV` | `npm -v` | Check package versions |
| `npmL` | `npm list` | List installed packages |
| `npmL0` | `npm ls --depth=0` | List top-level installed packages |
| `npmst` | `npm start` | Run npm start |
| `npmt` | `npm test` | Run npm test |
| `npmR` | `npm run` | Run npm scripts |
| `npmP` | `npm publish` | Run npm publish |
| `npmI` | `npm init` | Run npm init |
@@ -11,21 +11,7 @@ This plugin automatically registers npx command-not-found handler if `npx` exists
plugins=(.... npx)
```

- Globally install npx binary (npx will be auto installed with recent versions of Node.js)
- Globally install npx binary (you need node.js installed too!)
```bash
sudo npm install -g npx
```

## Note

The shell auto-fallback doesn't auto-install plain packages. In order to get it to install something, you need to add `@`:

```
➜ jasmine@latest # or just `jasmine@`
npx: installed 13 in 1.896s
Randomized with seed 54385
Started
```

It does it this way so folks using the fallback don't accidentally try to install regular typoes.
@@ -1,41 +1,50 @@
# This plugin loads pyenv into the current shell and provides prompt info via
# the 'pyenv_prompt_info' function. Also loads pyenv-virtualenv if available.
_homebrew-installed() {
type brew &> /dev/null
}

FOUND_PYENV=$+commands[pyenv]
_pyenv-from-homebrew-installed() {
brew --prefix pyenv &> /dev/null
}

if [[ $FOUND_PYENV -ne 1 ]]; then
pyenvdirs=("$HOME/.pyenv" "/usr/local/pyenv" "/opt/pyenv")
for dir in $pyenvdirs; do
if [[ -d $dir/bin ]]; then
export PATH="$PATH:$dir/bin"
FOUND_PYENV=1
break
FOUND_PYENV=0
pyenvdirs=("$HOME/.pyenv" "/usr/local/pyenv" "/opt/pyenv")

for pyenvdir in "${pyenvdirs[@]}" ; do
if [ -d $pyenvdir/bin -a $FOUND_PYENV -eq 0 ] ; then
FOUND_PYENV=1
export PYENV_ROOT=$pyenvdir
export PATH=${pyenvdir}/bin:$PATH
eval "$(pyenv init - zsh)"

if pyenv commands | command grep -q virtualenv-init; then
eval "$(pyenv virtualenv-init - zsh)"
fi
done
fi

if [[ $FOUND_PYENV -ne 1 ]]; then
if (( $+commands[brew] )) && dir=$(brew --prefix pyenv 2>/dev/null); then
if [[ -d $dir/bin ]]; then
export PATH="$PATH:$dir/bin"
FOUND_PYENV=1
function pyenv_prompt_info() {
echo "$(pyenv version-name)"
}
fi
done
unset pyenvdir

if [ $FOUND_PYENV -eq 0 ] ; then
pyenvdir=$(brew --prefix pyenv 2> /dev/null)
if [ $? -eq 0 -a -d $pyenvdir/bin ] ; then
FOUND_PYENV=1
export PYENV_ROOT=$pyenvdir
export PATH=${pyenvdir}/bin:$PATH
eval "$(pyenv init - zsh)"

if pyenv commands | command grep -q virtualenv-init; then
eval "$(pyenv virtualenv-init - zsh)"
fi

function pyenv_prompt_info() {
echo "$(pyenv version-name)"
}
fi
fi

if [[ $FOUND_PYENV -eq 1 ]]; then
eval "$(pyenv init - zsh)"
if (( $+commands[pyenv-virtualenv-init] )); then
eval "$(pyenv virtualenv-init - zsh)"
fi
function pyenv_prompt_info() {
echo "$(pyenv version-name)"
}
else
# fallback to system python
function pyenv_prompt_info() {
echo "system: $(python -V 2>&1 | cut -f 2 -d ' ')"
}
if [ $FOUND_PYENV -eq 0 ] ; then
function pyenv_prompt_info() { echo "system: $(python -V 2>&1 | cut -f 2 -d ' ')" }
fi

unset FOUND_PYENV dir
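Both sides of this hunk end up defining pyenv_prompt_info, which a theme can interpolate into the prompt. A minimal, assumed example of wiring it into a .zsh-theme (the prompt string is illustrative, not from the diff):

```zsh
setopt prompt_subst
RPROMPT='py:$(pyenv_prompt_info)'
```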
@@ -51,9 +51,6 @@ _arguments \
if (( CURRENT == 1 )); then
_describe -t commands "rails subcommand" _1st_arguments
return
else
_files
return
fi

case "$words[1]" in
@@ -88,14 +88,14 @@ _rustc_opts_vals=(
--pretty='[Pretty-print the input instead of compiling]::TYPE:_values "TYPES" "$_rustc_pretty_types[@]"'
--unpretty='[Present the input source, unstable (and less-pretty)]::TYPE:_values "TYPES" "$_rustc_unpretty_types[@]"'
--color='[Configure coloring of output]:CONF:_values "COLORS" "$_rustc_color_types[@]"'
{-v,--version}'[Print version info and exit]::VERBOSE:(verbose)'
)

_rustc_opts_switches=(
-g'[Equivalent to --debuginfo=2]'
-O'[Equivalent to --opt-level=2]'
--test'[Build a test harness]'
{-v,--verbose}'[Use verbose output]'
{-V,--version}'[Print version info and exit]'
--verbose'[Use verbose output]'
{-h,--help}'[Display this message]'
--no-analysis'[Parse and expand the output, but run no analysis or produce output]'
--no-trans'[Run all passes except translation; no output]'
@@ -88,18 +88,19 @@ shrink_path () {

if (( named )) {
for part in ${(k)nameddirs}; {
[[ $dir == ${nameddirs[$part]}(/*|) ]] && dir=${dir/#${nameddirs[$part]}/\~$part}
[[ $dir == ${nameddirs[$part]}(/*|) ]] && dir=${dir/${nameddirs[$part]}/\~$part}
}
}
(( tilde )) && dir=${dir/#$HOME/\~}
(( tilde )) && dir=${dir/$HOME/\~}
tree=(${(s:/:)dir})
(
unfunction chpwd 2> /dev/null
if [[ $tree[1] == \~* ]] {
cd -q ${~tree[1]}
cd ${~tree[1]}
result=$tree[1]
shift tree
} else {
cd -q /
cd /
}
for dir in $tree; {
if (( lastfull && $#tree == 1 )) {

@@ -116,7 +117,7 @@ shrink_path () {
(( short )) && break
done
result+="/$part"
cd -q $dir
cd $dir
shift tree
}
echo ${result:-/}
@@ -11,8 +11,6 @@ plugins=(... ssh-agent)

## Instructions

**IMPORTANT: put these settings _before_ the line that sources oh-my-zsh**

To enable **agent forwarding support** add the following to your zshrc file:

```zsh
@@ -1,9 +1,5 @@
# Sublime Text Aliases

() {

if [[ "$OSTYPE" == linux* ]]; then
local _sublime_linux_paths
local _sublime_linux_paths > /dev/null 2>&1
_sublime_linux_paths=(
"$HOME/bin/sublime_text"
"/opt/sublime_text/sublime_text"

@@ -23,8 +19,9 @@ if [[ "$OSTYPE" == linux* ]]; then
break
fi
done

elif [[ "$OSTYPE" = darwin* ]]; then
local _sublime_darwin_paths
local _sublime_darwin_paths > /dev/null 2>&1
_sublime_darwin_paths=(
"/usr/local/bin/subl"
"/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl"

@@ -41,9 +38,10 @@ elif [[ "$OSTYPE" = darwin* ]]; then
break
fi
done

elif [[ "$OSTYPE" = 'cygwin' ]]; then
local sublime_cygwin_paths
sublime_cygwin_paths=(
local _sublime_cygwin_paths > /dev/null 2>&1
_sublime_cygwin_paths=(
"$(cygpath $ProgramW6432/Sublime\ Text\ 2)/sublime_text.exe"
"$(cygpath $ProgramW6432/Sublime\ Text\ 3)/sublime_text.exe"
)

@@ -54,9 +52,8 @@ elif [[ "$OSTYPE" = 'cygwin' ]]; then
break
fi
done
fi

}
fi

alias stt='st .'
@@ -1,9 +1,7 @@
## Terraform oh-my-zsh plugin
## atom

Plugin for Terraform, a tool from Hashicorp for managing infrastructure safely and efficiently.

Current as of Terraform v0.11.7

### Requirements

* [Terraform](https://terraform.io/)

@@ -15,7 +13,7 @@ Current as of Terraform v0.11.7
### Expanding ZSH prompt with current Terraform workspace name

If you want to get current Terraform workspace name in your ZSH prompt open
your .zsh-theme file and in a chosen place insert:
your .zsh-theme file and in a choosen place insert:

```
$FG[045]\
@ -3,151 +3,91 @@
|
||||
local -a _terraform_cmds
|
||||
_terraform_cmds=(
|
||||
'apply:Builds or changes infrastructure'
|
||||
'console:Interactive console for Terraform interpolations'
|
||||
'destroy:Destroy Terraform-managed infrastructure'
|
||||
'fmt:Rewrites config files to canonical format'
|
||||
'get:Download and install modules for the configuration'
|
||||
'graph:Create a visual graph of Terraform resources'
|
||||
'import:Import existing infrastructure into Terraform'
|
||||
'init:Initialize a Terraform working directory'
|
||||
'init:Initializes Terraform configuration from a module'
|
||||
'output:Read an output from a state file'
|
||||
'plan:Generate and show an execution plan'
|
||||
'providers:Prints a tree of the providers used in the configuration'
|
||||
'push:Upload this Terraform module to Atlas to run'
|
||||
'pull:Refreshes the local state copy from the remote server'
|
||||
'push:Uploads the local state to the remote server'
|
||||
'refresh:Update local state file against real resources'
|
||||
'remote:Configures remote state management'
|
||||
'show:Inspect Terraform state or plan'
|
||||
'taint:Manually mark a resource for recreation'
|
||||
'untaint:Manually unmark a resource as tainted'
|
||||
'validate:Validates the Terraform files'
|
||||
'taint:Manually forcing a destroy and recreate on the next plan/apply'
|
||||
'version:Prints the Terraform version'
|
||||
'workspace:Workspace management'
|
||||
)
|
||||
|
||||
__apply() {
|
||||
_arguments \
|
||||
'-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
|
||||
'-auto-approve[Skip interactive approval of plan before applying.]' \
|
||||
'-lock=[(true) Lock the state file when locking is supported.]' \
|
||||
'-lock-timeout=[(0s) Duration to retry a state lock.]' \
|
||||
'-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
|
||||
'-input=[(true) Ask for input for variables if not directly set.]' \
|
||||
'-no-color[If specified, output wil be colorless.]' \
|
||||
'-parallelism=[(10) Limit the number of parallel resource operations.]' \
|
||||
'-no-color[If specified, output will not contain any color.]' \
|
||||
'-refresh=[(true) Update state prior to checking for differences. This has no effect if a plan file is given to apply.]' \
|
||||
'-state=[(terraform.tfstate) Path to read and save state (unless state-out is specified).]' \
|
||||
'-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
|
||||
'-state-out=[(path) Path to write state to that is different than "-state". This can be used to preserve the old state.]' \
|
||||
'-target=[(resource) Resource to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]' \
|
||||
'-target=[(resource) A Resource Address to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]' \
|
||||
'-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
|
||||
'-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
|
||||
}
|
||||
|
||||
__console() {
|
||||
_arguments \
|
||||
'-state=[(terraform.tfstate) Path to read state.]' \
|
||||
'-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
|
||||
'-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
|
||||
'-var-file=[(path) Set variables in the Terraform configuration from a file. If "terraform.tfvars" is present, it will be automatically loaded if this flag is not specified.]'
|
||||
}
|
||||
|
||||
__destroy() {
|
||||
_arguments \
|
||||
'-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
|
||||
'-auto-approve[Skip interactive approval before destroying.]' \
|
||||
'-force[Deprecated: same as auto-approve.]' \
|
||||
'-lock=[(true) Lock the state file when locking is supported.]' \
|
||||
'-lock-timeout=[(0s) Duration to retry a state lock.]' \
|
||||
'-no-color[If specified, output will contain no color.]' \
|
||||
'-parallelism=[(10) Limit the number of concurrent operations.]' \
|
||||
'-force[If set, then the destroy confirmation will not be shown.]' \
|
||||
'-input=[(true) Ask for input for variables if not directly set.]' \
|
||||
'-no-color[If specified, output will not contain any color.]' \
|
||||
'-refresh=[(true) Update state prior to checking for differences. This has no effect if a plan file is given to apply.]' \
|
||||
'-state=[(terraform.tfstate) Path to read and save state (unless state-out is specified).]' \
|
||||
'-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
|
||||
'-state-out=[(path) Path to write state to that is different than "-state". This can be used to preserve the old state.]' \
|
||||
'-target=[(resource) Resource to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]' \
|
||||
'-target=[(resource) Instead of affecting "dependencies" will instead also destroy any resources that depend on the target(s) specified.]' \
|
||||
'-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
|
||||
'-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
|
||||
}
|
||||
|
||||
__fmt() {
|
||||
_arguments \
|
||||
'-list=[(true) List files whose formatting differs (always false if using STDIN)]' \
|
||||
'-write=[(true) Write result to source file instead of STDOUT (always false if using STDIN or -check)]' \
|
||||
'-diff=[(false) Display diffs of formatting changes]' \
|
||||
'-check=[(false) Check if the input is formatted. Exit status will be 0 if all input is properly formatted and non-zero otherwise.]'
|
||||
'-var-file=[(path) Set variables in the Terraform configuration from a file. If "terraform.tfvars" is present, it will be automatically loaded if this flag is not specified.]'
|
||||
}
|
||||
|
||||
__get() {
|
||||
_arguments \
|
||||
'-update=[(false) If true, modules already downloaded will be checked for updates and updated if necessary.]' \
|
||||
'-no-color[If specified, output will contain no color.]'
|
||||
'-update=[(false) If true, modules already downloaded will be checked for updates and updated if necessary.]'
|
||||
}
|
||||
|
||||
__graph() {
|
||||
_arguments \
|
||||
'-draw-cycles[Highlight any cycles in the graph with colored edges. This helps when diagnosing cycle errors.]' \
|
||||
'-no-color[If specified, output will contain no color.]' \
|
||||
'-type=[(plan) Type of graph to output. Can be: plan, plan-destroy, apply, validate, input, refresh.]'
|
||||
}
|
||||
|
||||
__import() {
|
||||
_arguments \
|
||||
'-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
|
||||
'-config=[(path) Path to a directory of Terraform configuration files to use to configure the provider. Defaults to pwd. If no config files are present, they must be provided via the input prompts or env vars.]' \
|
||||
'-allow-missing-config[Allow import when no resource configuration block exists.]' \
|
||||
'-input=[(true) Ask for input for variables if not directly set.]' \
|
||||
'-lock=[(true) Lock the state file when locking is supported.]' \
|
||||
'-lock-timeout=[(0s) Duration to retry a state lock.]' \
|
||||
'-no-color[If specified, output will contain no color.]' \
|
||||
'-provider=[(provider) Specific provider to use for import. This is used for specifying aliases, such as "aws.eu". Defaults to the normal provider prefix of the resource being imported.]' \
|
||||
'-state=[(PATH) Path to the source state file. Defaults to the configured backend, or "terraform.tfstate"]' \
|
||||
'-state-out=[(PATH) Path to the destination state file to write to. If this is not specified, the source state file will be used. This can be a new or existing path.]' \
|
||||
'-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times. This is only useful with the "-config" flag.]' \
|
||||
'-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]'
|
||||
'-module-depth=[(n) The maximum depth to expand modules. By default this is zero, which will not expand modules at all.]' \
|
||||
'-verbose[Generate a verbose, "worst-case" graph, with all nodes for potential operations in place.]'
|
||||
}
|
||||
|
||||
__init() {
|
||||
_arguments \
|
||||
'-backend=[(true) Configure the backend for this configuration.]' \
|
||||
'-backend-config=[This can be either a path to an HCL file with key/value assignments (same format as terraform.tfvars) or a 'key=value' format. This is merged with what is in the configuration file. This can be specified multiple times. The backend type must be in the configuration itself.]' \
|
||||
'-force-copy[Suppress prompts about copying state data. This is equivalent to providing a "yes" to all confirmation prompts.]' \
|
||||
'-from-module=[Copy the contents of the given module into the target directory before initialization.]' \
|
||||
'-get=[(true) Download any modules for this configuration.]' \
|
||||
'-get-plugins=[(true) Download any missing plugins for this configuration.]' \
|
||||
'-input=[(true) Ask for input if necessary. If false, will error if input was required.]' \
|
||||
'-lock=[(true) Lock the state file when locking is supported.]' \
|
||||
'-lock-timeout=[(0s) Duration to retry a state lock.]' \
|
||||
'-no-color[If specified, output will contain no color.]' \
|
||||
'-plugin-dir[Directory containing plugin binaries. This overrides all default search paths for plugins, and prevents the automatic installation of plugins. This flag can be used multiple times.]' \
|
||||
'-reconfigure[Reconfigure the backend, ignoring any saved configuration.]' \
|
||||
'-upgrade=[(false) If installing modules (-get) or plugins (-get-plugins), ignore previously-downloaded objects and install the latest version allowed within configured constraints.]' \
|
||||
'-verify-plugins=[(true) Verify the authenticity and integrity of automatically downloaded plugins.]'
|
||||
'-address=[(url) URL of the remote storage server. Required for HTTP backend, optional for Atlas and Consul.]' \
|
||||
'-access-token=[(token) Authentication token for state storage server. Required for Atlas backend, optional for Consul.]' \
|
||||
'-backend=[(atlas) Specifies the type of remote backend. Must be one of Atlas, Consul, or HTTP. Defaults to atlas.]' \
|
||||
'-backend-config=[(path) Specifies the path to remote backend config file.]' \
|
||||
'-name=[(name) Name of the state file in the state storage server. Required for Atlas backend.]' \
|
||||
'-path=[(path) Path of the remote state in Consul. Required for the Consul backend.]'
|
||||
}
|
||||
|
||||
__output() {
|
||||
_arguments \
|
||||
'-state=[(path) Path to the state file to read. Defaults to "terraform.tfstate".]' \
|
||||
'-no-color[ If specified, output will contain no color.]' \
|
||||
'-module=[(name) If specified, returns the outputs for a specific module]' \
|
||||
'-json[If specified, machine readable output will be printed in JSON format]'
|
||||
'-module=[(module_name) The module path which has needed output. By default this is the root path. Other modules can be specified by a period-separated list.]'
|
||||
}
|
||||
|
||||
__plan() {
|
||||
_arguments \
|
||||
'-destroy[() If set, a plan will be generated to destroy all resources managed by the given configuration and state.]' \
|
||||
'-detailed-exitcode[() Return detailed exit codes when the command exits. This will change the meaning of exit codes to: 0 - Succeeded, diff is empty (no changes); 1 - Errored, 2 - Succeeded; there is a diff]' \
|
||||
'-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with" .backup" extension. Set to "-" to disable backup.]' \
|
||||
'-destroy[If set, a plan will be generated to destroy all resources managed by the given configuration and state.]' \
|
||||
'-detailed-exitcode[Return a detailed exit code when the command exits. When provided, this argument changes the exit codes and their meanings to provide more granular information about what the resulting plan contains]' \
|
||||
'-input=[(true) Ask for input for variables if not directly set.]' \
|
||||
'-lock=[(true) Lock the state file when locking is supported.]' \
|
||||
'-lock-timeout=[(0s) Duration to retry a state lock.]' \
|
||||
'-module-depth=[(n) Specifies the depth of modules to show in the output. This does not affect the plan itself, only the output shown. By default, this is -1, which will expand all.]' \
|
||||
'-no-color[() If specified, output will contain no color.]' \
|
||||
'-module-depth=[(n) Specifies the depth of modules to show in the output. This does not affect the plan itself, only the output shown. By default, this is zero. -1 will expand all.]' \
|
||||
'-no-color[If specified, output will not contain any color.]' \
|
||||
'-out=[(path) Write a plan file to the given path. This can be used as input to the "apply" command.]' \
|
||||
'-parallelism=[(10) Limit the number of concurrent operations.]' \
|
||||
'-refresh=[(true) Update state prior to checking for differences.]' \
|
||||
'-state=[(statefile) Path to a Terraform state file to use to look up Terraform-managed resources. By default it will use the state "terraform.tfstate" if it exists.]' \
|
||||
'-target=[(resource) Resource to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]' \
|
||||
'-target=[(resource) A Resource Address to target. Operation will be limited to this resource and its dependencies. This flag can be used multiple times.]' \
|
||||
'-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
|
||||
'-var-file=[(foo) Set variables in the Terraform configuration from a file. If "terraform.tfvars" or any ".auto.tfvars" files are present, they will be automatically loaded.]' \
|
||||
}
|
||||
|
||||
__providers() {
|
||||
_arguments \
|
||||
|
||||
'-var-file=[(path) Set variables in the Terraform configuration from a file. If "terraform.tfvars" is present, it will be automatically loaded if this flag is not specified.]'
|
||||
}
|
||||
|
||||
__push() {
|
||||
@ -166,9 +106,6 @@ __push() {
|
||||
__refresh() {
|
||||
_arguments \
|
||||
'-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
|
||||
'-input=[(true) Ask for input for variables if not directly set.]' \
|
||||
'-lock=[(true) Lock the state file when locking is supported.]' \
|
||||
'-lock-timeout=[(0s) Duration to retry a state lock.]' \
|
||||
'-no-color[If specified, output will not contain any color.]' \
|
||||
'-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
|
||||
'-state-out=[(path) Path to write state to that is different than "-state". This can be used to preserve the old state.]' \
|
@ -177,6 +114,19 @@ __refresh() {
        '-var-file=[(path) Set variables in the Terraform configuration from a file. If "terraform.tfvars" is present, it will be automatically loaded if this flag is not specified.]'
}

__remote() {
    _arguments \
        '-address=[(url) URL of the remote storage server. Required for HTTP backend, optional for Atlas and Consul.]' \
        '-access-token=[(token) Authentication token for state storage server. Required for Atlas backend, optional for Consul.]' \
        '-backend=[(atlas) Specifies the type of remote backend. Must be one of Atlas, Consul, or HTTP. Defaults to atlas.]' \
        '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
        '-disable[Disables remote state management and migrates the state to the -state path.]' \
        '-name=[(name) Name of the state file in the state storage server. Required for Atlas backend.]' \
        '-path=[(path) Path of the remote state in Consul. Required for the Consul backend.]' \
        '-pull=[(true) Controls if the remote state is pulled before disabling. This defaults to true to ensure the latest state is cached before disabling.]' \
        '-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]'
}

__show() {
    _arguments \
        '-module-depth=[(n) The maximum depth to expand modules. By default this is zero, which will not expand modules at all.]' \
@ -187,46 +137,12 @@ __taint() {
    _arguments \
        '-allow-missing[If specified, the command will succeed (exit code 0) even if the resource is missing.]' \
        '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
        '-lock=[(true) Lock the state file when locking is supported.]' \
        '-lock-timeout=[(0s) Duration to retry a state lock.]' \
        '-module=[(path) The module path where the resource lives. By default this will be root. Child modules can be specified by names. Ex. "consul" or "consul.vpc" (nested modules).]' \
        '-no-color[If specified, output will not contain any color.]' \
        '-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
        '-state-out=[(path) Path to write updated state file. By default, the "-state" path will be used.]'
}

__untaint() {
    _arguments \
        '-allow-missing[If specified, the command will succeed (exit code 0) even if the resource is missing.]' \
        '-backup=[(path) Path to backup the existing state file before modifying. Defaults to the "-state-out" path with ".backup" extension. Set to "-" to disable backup.]' \
        '-lock=[(true) Lock the state file when locking is supported.]' \
        '-lock-timeout=[(0s) Duration to retry a state lock.]' \
        '-module=[(path) The module path where the resource lives. By default this will be root. Child modules can be specified by names. Ex. "consul" or "consul.vpc" (nested modules).]' \
        '-no-color[If specified, output will not contain any color.]' \
        '-state=[(path) Path to read and save state (unless state-out is specified). Defaults to "terraform.tfstate".]' \
        '-state-out=[(path) Path to write updated state file. By default, the "-state" path will be used.]'
}

__validate() {
    _arguments \
        '-check-variables=[(true) If set to true (default), the command will check whether all required variables have been specified.]' \
        '-no-color[If specified, output will not contain any color.]' \
        '-var[("foo=bar") Set a variable in the Terraform configuration. This flag can be set multiple times.]' \
        '-var-file=[(path) Set variables in the Terraform configuration from a file. If "terraform.tfvars" is present, it will be automatically loaded if this flag is not specified.]'
}

__workspace() {
    local -a __workspace_cmds
    __workspace_cmds=(
        'delete:Delete a workspace'
        'list:List Workspaces'
        'new:Create a new workspace'
        'select:Select a workspace'
        'show:Show the name of the current workspace'
    )
    _describe -t workspace "workspace commands" __workspace_cmds
}

_arguments '*:: :->command'

if (( CURRENT == 1 )); then
@ -238,38 +154,26 @@ local -a _command_args
case "$words[1]" in
    apply)
        __apply ;;
    console)
        __console ;;
    destroy)
        __destroy ;;
    fmt)
        __fmt ;;
    get)
        __get ;;
    graph)
        __graph ;;
    import)
        __import ;;
    init)
        __init ;;
    output)
        __output ;;
    plan)
        __plan ;;
    providers)
        __providers ;;
    push)
        __push ;;
    refresh)
        __refresh ;;
    remote)
        __remote ;;
    show)
        __show ;;
    taint)
        __taint ;;
    untaint)
        __untaint ;;
    validate)
        __validate ;;
    workspace)
        test $CURRENT -lt 3 && __workspace ;;
esac
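The `__workspace` helper above completes Terraform's workspace subcommands; for orientation, these correspond to ordinary Terraform CLI invocations such as the following (standard `terraform` usage, not part of the completion code itself):

```zsh
terraform workspace list             # list existing workspaces
terraform workspace new staging      # create and switch to "staging"
terraform workspace select default   # switch back to "default"
terraform workspace show             # print the current workspace name
```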
@ -1,24 +0,0 @@
# `transfer` plugin

[`transfer.sh`](https://transfer.sh) is an easy-to-use file-sharing service you can use from the command line.

## Usage

Add `transfer` to the plugins array in your zshrc file:

```zsh
plugins=(... transfer)
```

Then you can:

- transfer a file:

  ```zsh
  transfer file.txt
  ```

- transfer a whole directory (it will be automatically compressed):

  ```zsh
  transfer directory/
  ```
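The `transfer` function shown below also accepts data on standard input (its own usage message demonstrates this), so command output can be uploaded without creating a file first; the argument then names the remote file:

```zsh
# Upload piped data; "test.md" is the name the file gets on transfer.sh
cat /tmp/test.md | transfer test.md
```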
@ -1,67 +0,0 @@
# transfer.sh: easy file sharing from the command line
# transfer plugin
# Usage examples:
#   > transfer file.txt
#   > transfer directory/
#
# Author:
#   Remco Verhoef <remco@dutchcoders.io>
#   https://gist.github.com/nl5887/a511f172d3fb3cd0e42d
#   Modified to use the tar command instead of zip
#

curl --version >/dev/null 2>&1
if [ $? -ne 0 ]; then
  echo "Could not find curl."
  return 1
fi

transfer() {
  # check arguments
  if [ $# -eq 0 ]; then
    echo "No arguments specified. Usage:\necho transfer /tmp/test.md\ncat /tmp/test.md | transfer test.md"
    return 1
  fi

  # create a temporary file; curl's output is written to it so progress can be shown
  tmpfile=$( mktemp -t transferXXX )

  # upload stdin or file
  file=$1

  if tty -s; then
    basefile=$(basename "$file" | sed -e 's/[^a-zA-Z0-9._-]/-/g')

    if [ ! -e "$file" ]; then
      echo "File $file doesn't exist."
      return 1
    fi

    if [ -d "$file" ]; then
      echo "$file"
      # tar the directory and transfer the archive
      tarfile=$( mktemp -t transferXXX.tar.gz )
      cd "$(dirname "$file")" && tar -czf "$tarfile" "$(basename "$file")"
      curl --progress-bar --upload-file "$tarfile" "https://transfer.sh/$basefile.tar.gz" >> "$tmpfile"
      rm -f "$tarfile"
    else
      # transfer the file
      curl --progress-bar --upload-file "$file" "https://transfer.sh/$basefile" >> "$tmpfile"
    fi
  else
    # transfer from a pipe
    curl --progress-bar --upload-file "-" "https://transfer.sh/$file" >> "$tmpfile"
  fi

  # print the download link
  cat "$tmpfile"

  # cleanup
  rm -f "$tmpfile"
}
@ -5,21 +5,18 @@
# https://github.com/trinaldi
# Nicolas Jonas nextgenthemes.com
# https://github.com/loctauxphilippe
# https://github.com/HaraldNordgren
#
# Debian, Ubuntu and friends related zsh aliases and functions for zsh

(( $+commands[apt] )) && APT=apt || APT=apt-get

alias acs='apt-cache search'
compdef _acs acs='apt-cache search'

alias afs='apt-file search --regexp'
compdef _afs afs='apt-file search --regexp'

# These are apt/apt-get only
alias ags="$APT source" # asrc
compdef _ags ags="$APT source"
# These are apt-get only
alias ags='apt-get source' # asrc
compdef _ags ags='apt-get source'

alias acp='apt-cache policy' # app
compdef _acp acp='apt-cache policy'
@ -40,33 +37,33 @@ compdef _afu afu='sudo apt-file update'
alias ppap='sudo ppa-purge'
compdef _ppap ppap='sudo ppa-purge'

alias ag="sudo $APT" # age - but without sudo
alias aga="sudo $APT autoclean" # aac
alias agb="sudo $APT build-dep" # abd
alias agc="sudo $APT clean" # adc
alias agd="sudo $APT dselect-upgrade" # ads
alias agi="sudo $APT install" # ai
alias agp="sudo $APT purge" # ap
alias agr="sudo $APT remove" # ar
alias agu="sudo $APT update" # ad
alias agud="sudo $APT update && sudo $APT dist-upgrade" # adu
alias agug="sudo $APT upgrade" # ag
alias aguu="sudo $APT update && sudo $APT upgrade" # adg
alias agar="sudo $APT autoremove"
alias apg='sudo apt-get' # age - but without sudo
alias aga='sudo apt-get autoclean' # aac
alias agb='sudo apt-get build-dep' # abd
alias agc='sudo apt-get clean' # adc
alias agd='sudo apt-get dselect-upgrade' # ads
alias agi='sudo apt-get install' # ai
alias agp='sudo apt-get purge' # ap
alias agr='sudo apt-get remove' # ar
alias agu='sudo apt-get update' # ad
alias agud='sudo apt-get update && sudo apt-get full-upgrade' # adu
alias agug='sudo apt-get upgrade' # ag
alias aguu='sudo apt-get update && sudo apt-get upgrade' # adg
alias agar='sudo apt-get autoremove'

compdef _ag ag="sudo $APT"
compdef _aga aga="sudo $APT autoclean"
compdef _agb agb="sudo $APT build-dep"
compdef _agc agc="sudo $APT clean"
compdef _agd agd="sudo $APT dselect-upgrade"
compdef _agi agi="sudo $APT install"
compdef _agp agp="sudo $APT purge"
compdef _agr agr="sudo $APT remove"
compdef _agu agu="sudo $APT update"
compdef _agud agud="sudo $APT update && sudo $APT dist-upgrade"
compdef _agug agug="sudo $APT upgrade"
compdef _aguu aguu="sudo $APT update && sudo $APT upgrade"
compdef _agar agar="sudo $APT autoremove"
compdef _ag apg='sudo apt-get'
compdef _aga aga='sudo apt-get autoclean'
compdef _agb agb='sudo apt-get build-dep'
compdef _agc agc='sudo apt-get clean'
compdef _agd agd='sudo apt-get dselect-upgrade'
compdef _agi agi='sudo apt-get install'
compdef _agp agp='sudo apt-get purge'
compdef _agr agr='sudo apt-get remove'
compdef _agu agu='sudo apt-get update'
compdef _agud agud='sudo apt-get update && sudo apt-get full-upgrade'
compdef _agug agug='sudo apt-get upgrade'
compdef _aguu aguu='sudo apt-get update && sudo apt-get upgrade'
compdef _agar agar='sudo apt-get autoremove'

# Remove ALL kernel images and headers EXCEPT the one in use
alias kclean='sudo aptitude remove -P ?and(~i~nlinux-(ima|hea) \
@ -94,8 +91,8 @@ aar() {
    PACKAGE=${1##*/}
  fi

  sudo apt-add-repository $1 && sudo $APT update
  sudo $APT install $PACKAGE
  sudo apt-add-repository $1 && sudo apt-get update
  sudo apt-get install $PACKAGE
}

# Prints apt history
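The `$APT`-based side of this hunk keys everything off `$+commands[apt]`: zsh's `commands` association maps external command names to their paths, and `$+name` expands to 1 when the parameter is set. A minimal sketch of that detection idiom (the second pair of names is purely illustrative, not from the plugin):

```zsh
# Prefer `apt` when present, otherwise fall back to `apt-get`
(( $+commands[apt] )) && APT=apt || APT=apt-get
print "Using package front-end: $APT"

# The same pattern works for any command pair, e.g. a hypothetical preference:
# (( $+commands[somefrontend] )) && PKG=somefrontend || PKG=apt
```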
@ -1,27 +1,11 @@
virtualenvwrapper='virtualenvwrapper.sh'
virtualenvwrapper_lazy='virtualenvwrapper_lazy.sh'

if (( $+commands[$virtualenvwrapper_lazy] )); then
  function {
    setopt local_options
    unsetopt equals
    virtualenvwrapper=${${virtualenvwrapper_lazy}:c}
    source ${${virtualenvwrapper_lazy}:c}
    [[ -z "$WORKON_HOME" ]] && WORKON_HOME="$HOME/.virtualenvs"
  }
elif (( $+commands[$virtualenvwrapper] )); then
if (( $+commands[$virtualenvwrapper] )); then
  function {
    setopt local_options
    unsetopt equals
    source ${${virtualenvwrapper}:c}
  }
elif [[ -f "/usr/local/bin/virtualenvwrapper.sh" ]]; then
  function {
    setopt local_options
    unsetopt equals
    virtualenvwrapper="/usr/local/bin/virtualenvwrapper.sh"
    source "/usr/local/bin/virtualenvwrapper.sh"
  }
elif [[ -f "/etc/bash_completion.d/virtualenvwrapper" ]]; then
  function {
    setopt local_options
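The `${${virtualenvwrapper_lazy}:c}` expansions above use zsh's `c` modifier, which resolves a bare command name into an absolute path by searching `$PATH` (the nested braces just force the inner parameter expansion before the modifier is applied). A quick illustration, assuming a command named `ls` is on your PATH:

```zsh
name='ls'
print ${name:c}      # e.g. /bin/ls (the exact path depends on your system)
print ${${name}:c}   # same result; the inner expansion happens first
```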
@ -6,7 +6,7 @@ function vundle-init () {

  if [ ! -d ~/.vim/bundle/Vundle.vim/.git ] && [ ! -f ~/.vim/bundle/Vundle.vim/.git ]
  then
    git clone https://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim
    git clone git://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim
    echo "\n\tRead about vim configuration for vundle at https://github.com/VundleVim/Vundle.vim\n"
  fi
}
@ -19,7 +19,7 @@ plugins=(... xcode)
| xcdd  | Purge all temporary build information    | rm -rf ~/Library/Developer/Xcode/DerivedData/* |
| xcp   | Show currently selected Xcode directory  | xcode-select --print-path                      |
| xcsel | Select different Xcode directory by path | sudo xcode-select --switch                     |
| xx    | Opens the files listed in Xcode          | open -a "Xcode.app"                            |

## Functions
@ -29,10 +29,6 @@ plugins=(... xcode)
Opens the current directory in Xcode as an Xcode project. This will open one of the `.xcworkspace` and `.xcodeproj` files that it can find in the current working directory. You can also specify a directory to look in for the Xcode files.
Returns 1 if it didn't find any relevant files.

### `xx`

Opens the files listed in Xcode; multiple files are opened in a multi-file browser.

### `simulator`

Opens the iOS Simulator from your command line, dependent on whichever is the active developer directory for Xcode. (That is, it respects the `xcsel` setting.)
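The helpers documented above are ordinary shell functions; typical invocations look like the following (the paths and file names are placeholders, not part of the plugin):

```zsh
xc                        # open the .xcworkspace/.xcodeproj found in the current directory
xc ~/Projects/MyApp       # or look for the Xcode files in a specific directory
xx App.swift Model.swift  # open several files in Xcode's multi-file browser
simulator                 # launch the iOS Simulator for the active developer directory
```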
@ -27,17 +27,6 @@ function xc {
  fi
}

# Opens a file or files in the Xcode IDE. Multiple files are opened in a multi-file browser.
# original author: @possen
function xx {
  if [[ $# == 0 ]]; then
    echo "Specify file(s) to open in xcode."
    return 1
  fi
  echo "${xcode_files}"
  open -a "Xcode.app" "$@"
}

# "XCode-SELect by Version" - select Xcode by just version number
# Uses naming convention:
#  - different versions of Xcode are named Xcode-<version>.app or stored
@ -1,23 +0,0 @@
# zsh_reload plugin

The zsh_reload plugin defines a function to reload the zsh session with just a few keystrokes.

To use it, add `zsh_reload` to the plugins array in your zshrc file:

```zsh
plugins=(... zsh_reload)
```

## Usage

To reload the zsh session, just run `src`:

```zsh
$ vim ~/.zshrc # enabled a plugin
$ src
re-compiling /home/user/.zshrc.zwc: succeeded
re-compiling /home/user/.oh-my-zsh/cache/zcomp-host.zwc: succeeded

# you now have a fresh zsh session. happy hacking!
```
@ -1,12 +1,13 @@
src() {
  local cache="$ZSH_CACHE_DIR"
  autoload -U compinit zrecompile
  compinit -i -d "$cache/zcomp-$HOST"
# reload zshrc
function src()
{
  local cache=$ZSH_CACHE_DIR
  autoload -U compinit zrecompile
  compinit -d "$cache/zcomp-$HOST"

  for f in ~/.zshrc "$cache/zcomp-$HOST"; do
    zrecompile -p $f && command rm -f $f.zwc.old
  done

  # Use $SHELL if available; remove leading dash if login shell
  [[ -n "$SHELL" ]] && exec ${SHELL#-} || exec zsh
  source ~/.zshrc
}
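One side of this hunk ends `src()` with `exec ${SHELL#-}`; that expansion is plain `${name#pattern}` prefix removal, so a value with a leading dash (as login shells report themselves) is cleaned up before being passed to `exec`, and it is a no-op otherwise. A tiny illustration with a throwaway variable:

```zsh
shell='-zsh'       # example of a login-shell style value
print ${shell#-}   # -> zsh (leading dash stripped; unchanged if there is no dash)
```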
@ -93,9 +93,9 @@ prompt_git() {
    PL_BRANCH_CHAR=$'\ue0a0' #
  }
  local ref dirty mode repo_path
  repo_path=$(git rev-parse --git-dir 2>/dev/null)

  if $(git rev-parse --is-inside-work-tree >/dev/null 2>&1); then
    repo_path=$(git rev-parse --git-dir 2>/dev/null)
    dirty=$(parse_git_dirty)
    ref=$(git symbolic-ref HEAD 2> /dev/null) || ref="➦ $(git rev-parse --short HEAD 2> /dev/null)"
    if [[ -n $dirty ]]; then
@ -151,7 +151,7 @@ prompt_bzr() {

prompt_hg() {
  (( $+commands[hg] )) || return
  local rev st branch
  local rev status
  if $(hg id >/dev/null 2>&1); then
    if $(hg prompt >/dev/null 2>&1); then
      if [[ $(hg prompt "{status|unknown}") = "?" ]]; then
@ -222,25 +222,7 @@ build_prompt() {
  prompt_git
  prompt_bzr
  prompt_hg
  prompt_docker_host
  # prompt_k8s_context
  prompt_end
}

PROMPT='%{%f%b%k%}$(build_prompt) '

prompt_docker_host() {
  if [[ ! -z "$DOCKER_MACHINE_NAME" ]]; then
    prompt_segment red default "\xF0\x9F\x90\xB3: '$DOCKER_MACHINE_NAME'"
  elif [[ ! -z "$DOCKER_HOST" ]]; then
    prompt_segment red default "\xF0\x9F\x90\xB3: '$DOCKER_HOST'"
  fi
}

# k8s context
#prompt_k8s_context() {
#  K8S_CONTEXT=$(cat $KUBECONFIG | grep 'current-context:' | cut -d: -f2 | tr -d ' ')
#  if [[ -n "$K8S_CONTEXT" ]]; then
#    prompt_segment red default "%{$fg_bold[blue]%}\xE2\x8E\x88%{$fg_no_bold[white]%}: '$K8S_CONTEXT'"
#  fi
#}
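The `prompt_docker_host` segment shown in this hunk renders only when `$DOCKER_MACHINE_NAME` or `$DOCKER_HOST` is set, so it can be previewed by exporting either variable (the values below are placeholders, not anything the theme requires):

```zsh
# Either of these makes the whale segment appear in the prompt
export DOCKER_MACHINE_NAME='dev-box'         # placeholder machine name
# or
export DOCKER_HOST='tcp://127.0.0.1:2376'    # placeholder daemon address
```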
@ -60,7 +60,7 @@ zstyle ':vcs_info:*:prompt:*' nvcsformats ""


function steeef_preexec {
    case "$2" in
    case "$(history $HISTCMD)" in
        *git*)
            PR_GIT_UPDATE=1
            ;;
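Both variants above hinge on what zsh passes to `preexec` hooks: `$1` is the command line as typed, `$2` is a single-line, size-limited version of the command, and `$3` is the fully expanded text, which is why one side can match against `"$2"` instead of querying the history list. A minimal hook that prints all three, purely for illustration:

```zsh
function demo_preexec {
    print -r -- "typed:    $1"
    print -r -- "one-line: $2"
    print -r -- "expanded: $3"
}
autoload -Uz add-zsh-hook
add-zsh-hook preexec demo_preexec
```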
@ -1,13 +1,16 @@
# user, host, full path, and time/date on two lines for easier vgrepping

if ! grep -q "prompt" ~/.hgrc; then
  echo "This theme requires 'hg prompt' (https://bitbucket.org/sjl/hg-prompt/overview)"
  return 1
fi

function hg_prompt_info {
  if (( $+commands[hg] )) && grep -q "prompt" ~/.hgrc; then
    hg prompt --angle-brackets "\
<hg:%{$fg[magenta]%}<branch>%{$reset_color%}><:%{$fg[magenta]%}<bookmark>%{$reset_color%}>\
</%{$fg[yellow]%}<tags|%{$reset_color%}, %{$fg[yellow]%}>%{$reset_color%}>\
%{$fg[red]%}<status|modified|unknown><update>%{$reset_color%}<
patches: <patches|join( → )|pre_applied(%{$fg[yellow]%})|post_applied(%{$reset_color%})|pre_unapplied(%{$fg_bold[black]%})|post_unapplied(%{$reset_color%})>>" 2>/dev/null
  fi
}

ZSH_THEME_GIT_PROMPT_ADDED="%{$fg[cyan]%}+"
@ -62,13 +62,10 @@ zstyle ':vcs_info:*:prompt:*' nvcsformats ""


function steeef_preexec {
    case "$2" in
    case "$(history $HISTCMD)" in
        *git*)
            PR_GIT_UPDATE=1
            ;;
        *hub*)
            PR_GIT_UPDATE=1
            ;;
        *svn*)
            PR_GIT_UPDATE=1
            ;;
@ -42,7 +42,7 @@ if mkdir "$ZSH/log/update.lock" 2>/dev/null; then
    if [ "$DISABLE_UPDATE_PROMPT" = "true" ]; then
      _upgrade_zsh
    else
      echo "[Oh My Zsh] Would you like to update? [Y/n]: \c"
      echo "[Oh My Zsh] Would you like to check for updates? [Y/n]: \c"
      read line
      if [[ "$line" == Y* ]] || [[ "$line" == y* ]] || [ -z "$line" ]; then
        _upgrade_zsh
@ -24,11 +24,12 @@ main() {
  # which may fail on systems lacking tput or terminfo
  set -e

  if ! command -v zsh >/dev/null 2>&1; then
  CHECK_ZSH_INSTALLED=$(grep /zsh$ /etc/shells | wc -l)
  if [ ! $CHECK_ZSH_INSTALLED -ge 1 ]; then
    printf "${YELLOW}Zsh is not installed!${NORMAL} Installing zsh\n"
    sudo apt-get install zsh -y

  fi
  unset CHECK_ZSH_INSTALLED

  if [ ! -n "$ZSH" ]; then
    ZSH=~/.oh-my-zsh