Updates 20.09.2021
13
zsh/plugins/ag/README.md
Normal file
@@ -0,0 +1,13 @@
# The Silver Searcher

This plugin provides completion support for [`ag`](https://github.com/ggreer/the_silver_searcher).

To use it, add ag to the plugins array in your zshrc file:

```zsh
plugins=(... ag)
```

## Installation notes

Besides enabling the plugin, `ag` itself needs to be installed; follow the steps at https://github.com/ggreer/the_silver_searcher#installing.
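For a quick sanity check after enabling the plugin, here are a couple of typical `ag` invocations whose flags appear in the completion function below (the pattern and paths are only placeholders):

```zsh
# show 3 lines of context around each match, only in .zsh files
ag --context=3 --file-search-regex '\.zsh$' 'compdef'

# list only the file names that contain a match
ag -l 'TODO' zsh/plugins
```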
66
zsh/plugins/ag/_ag
Normal file
@@ -0,0 +1,66 @@
#compdef ag
#autoload

typeset -A opt_args

# Took the liberty of not listing every option… especially aliases and -D
_ag () {
  local -a _1st_arguments
  _1st_arguments=(
    '--ackmate:Print results in AckMate-parseable format'
    {'-A','--after'}':[LINES] Print lines after match (Default: 2)'
    {'-B','--before'}':[LINES] Print lines before match (Default: 2)'
    '--break:Print newlines between matches in different files'
    '--nobreak:Do not print newlines between matches in different files'
    {'-c','--count'}':Only print the number of matches in each file'
    '--color:Print color codes in results (Default: On)'
    '--nocolor:Do not print color codes in results'
    '--color-line-number:Color codes for line numbers (Default: 1;33)'
    '--color-match:Color codes for result match numbers (Default: 30;43)'
    '--color-path:Color codes for path names (Default: 1;32)'
    '--column:Print column numbers in results'
    {'-H','--heading'}':Print file names (On unless searching a single file)'
    '--noheading:Do not print file names (On unless searching a single file)'
    '--line-numbers:Print line numbers even for streams'
    {'-C','--context'}':[LINES] Print lines before and after matches (Default: 2)'
    '-g:[PATTERN] Print filenames matching PATTERN'
    {'-l','--files-with-matches'}':Only print filenames that contain matches'
    {'-L','--files-without-matches'}':Only print filenames that do not contain matches'
    '--no-numbers:Do not print line numbers'
    {'-o','--only-matching'}':Print only the matching part of the lines'
    '--print-long-lines:Print matches on very long lines (Default: 2k characters)'
    '--passthrough:When searching a stream, print all lines even if they do not match'
    '--silent:Suppress all log messages, including errors'
    '--stats:Print stats (files scanned, time taken, etc.)'
    '--vimgrep:Print results like vim :vimgrep /pattern/g would'
    {'-0','--null'}':Separate filenames with null (for "xargs -0")'

    {'-a','--all-types'}':Search all files (does not include hidden files / .gitignore)'
    '--depth:[NUM] Search up to NUM directories deep (Default: 25)'
    {'-f','--follow'}':Follow symlinks'
    {'-G','--file-search-regex'}':[PATTERN] Limit search to filenames matching PATTERN'
    '--hidden:Search hidden files (obeys .*ignore files)'
    {'-i','--ignore-case'}':Match case insensitively'
    '--ignore:[PATTERN] Ignore files/directories matching PATTERN'
    {'-m','--max-count'}':[NUM] Skip the rest of a file after NUM matches (Default: 10k)'
    {'-p','--path-to-agignore'}':[PATH] Use .agignore file at PATH'
    {'-Q','--literal'}':Do not parse PATTERN as a regular expression'
    {'-s','--case-sensitive'}':Match case sensitively'
    {'-S','--smart-case'}':Insensitive match unless PATTERN has uppercase (Default: On)'
    '--search-binary:Search binary files for matches'
    {'-t','--all-text'}':Search all text files (Hidden files not included)'
    {'-u','--unrestricted'}':Search all files (ignore .agignore and _all_)'
    {'-U','--skip-vcs-ignores'}':Ignore VCS files (still obey .agignore)'
    {'-v','--invert-match'}':Invert match'
    {'-w','--word-regexp'}':Only match whole words'
    {'-z','--search-zip'}':Search contents of compressed (e.g., gzip) files'

    '--list-file-types:List supported file types'
  )

  if [[ $words[-1] =~ "^-" ]]; then
    _describe -t commands "ag options" _1st_arguments && ret=0
  else
    _files && ret=0
  fi
}
21
zsh/plugins/aliases/README.md
Normal file
@@ -0,0 +1,21 @@
## Aliases Cheatsheet

**Maintainer:** [@hqingyi](https://github.com/hqingyi)

With lots of amazing third-party aliases installed, this plugin helps list the shortcuts
that are currently available based on the plugins you have enabled.

Enable this plugin by adding it to your `plugins` definition in `~/.zshrc`.

```
plugins=(aliases)
```

Requirements: Python needs to be installed.

### Usage

```
acs: group all aliases
acs $keyword: quickly filter aliases & highlight
```
10
zsh/plugins/aliases/aliases.plugin.zsh
Normal file
@@ -0,0 +1,10 @@
# With lots of amazing third-party aliases installed, you need something to explore them quickly.
#
# - acs: alias cheatsheet
#   Groups aliases by the command they wrap; additional arguments filter and highlight matches.
ALIASES_PLUGIN_ROOT="$(cd "$(dirname "$0")" && pwd)"
function acs() {
  if ! which python >/dev/null 2>&1; then
    echo "[error] no python executable detected!" >&2
    return 1
  fi
  alias | python "$ALIASES_PLUGIN_ROOT/cheatsheet.py" "$@"
}
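As a usage sketch (mirroring the README above — the `git` keyword is only an example), `acs` simply pipes the output of `alias` through `cheatsheet.py`:

```zsh
# list every alias, grouped by the command it wraps
acs

# only show aliases whose name or expansion mentions "git", with matches highlighted
acs git
```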
55
zsh/plugins/aliases/cheatsheet.py
Normal file
@@ -0,0 +1,55 @@
#!/usr/bin/env python
import sys
import itertools
import termcolor

def parse(line):
    left = line[0:line.find('=')].strip()
    right = line[line.find('=')+1:].strip('\'"\n ')
    try:
        cmd = next(part for part in right.split() if len([char for char in '=<>' if char in part]) == 0)
    except StopIteration:
        cmd = right
    return (left, right, cmd)

def cheatsheet(lines):
    exps = [parse(line) for line in lines]
    cheatsheet = {'_default': []}
    for key, group in itertools.groupby(exps, lambda exp: exp[2]):
        group_list = [item for item in group]
        if len(group_list) == 1:
            target_aliases = cheatsheet['_default']
        else:
            if key not in cheatsheet:
                cheatsheet[key] = []
            target_aliases = cheatsheet[key]
        target_aliases.extend(group_list)
    return cheatsheet

def pretty_print_group(key, aliases, highlight=None):
    if len(aliases) == 0:
        return
    group_hl_formatter = lambda g, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'red') for part in ('[%s]' % g).split(hl)])
    alias_hl_formatter = lambda alias, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'green') for part in ('\t%s = %s' % alias[0:2]).split(hl)])
    group_formatter = lambda g: termcolor.colored('[%s]' % g, 'red')
    alias_formatter = lambda alias: termcolor.colored('\t%s = %s' % alias[0:2], 'green')
    if highlight and len(highlight) > 0:
        print(group_hl_formatter(key, highlight))
        print('\n'.join([alias_hl_formatter(alias, highlight) for alias in aliases]))
    else:
        print(group_formatter(key))
        print('\n'.join([alias_formatter(alias) for alias in aliases]))
    print('')

def pretty_print(cheatsheet, wfilter):
    sorted_key = sorted(cheatsheet.keys())
    for key in sorted_key:
        aliases = cheatsheet.get(key)
        if not wfilter:
            pretty_print_group(key, aliases, wfilter)
        else:
            pretty_print_group(key, [alias for alias in aliases if alias[0].find(wfilter) > -1 or alias[1].find(wfilter) > -1], wfilter)

if __name__ == '__main__':
    lines = sys.stdin.readlines()
    pretty_print(cheatsheet(lines), sys.argv[1] if len(sys.argv) > 1 else None)
168
zsh/plugins/aliases/termcolor.py
Normal file
@@ -0,0 +1,168 @@
|
||||
# coding: utf-8
|
||||
# Copyright (c) 2008-2011 Volvox Development Team
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
# Author: Konstantin Lepa <konstantin.lepa@gmail.com>
|
||||
|
||||
"""ANSII Color formatting for output in terminal."""
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
|
||||
|
||||
__ALL__ = [ 'colored', 'cprint' ]
|
||||
|
||||
VERSION = (1, 1, 0)
|
||||
|
||||
ATTRIBUTES = dict(
|
||||
list(zip([
|
||||
'bold',
|
||||
'dark',
|
||||
'',
|
||||
'underline',
|
||||
'blink',
|
||||
'',
|
||||
'reverse',
|
||||
'concealed'
|
||||
],
|
||||
list(range(1, 9))
|
||||
))
|
||||
)
|
||||
del ATTRIBUTES['']
|
||||
|
||||
|
||||
HIGHLIGHTS = dict(
|
||||
list(zip([
|
||||
'on_grey',
|
||||
'on_red',
|
||||
'on_green',
|
||||
'on_yellow',
|
||||
'on_blue',
|
||||
'on_magenta',
|
||||
'on_cyan',
|
||||
'on_white'
|
||||
],
|
||||
list(range(40, 48))
|
||||
))
|
||||
)
|
||||
|
||||
|
||||
COLORS = dict(
|
||||
list(zip([
|
||||
'grey',
|
||||
'red',
|
||||
'green',
|
||||
'yellow',
|
||||
'blue',
|
||||
'magenta',
|
||||
'cyan',
|
||||
'white',
|
||||
],
|
||||
list(range(30, 38))
|
||||
))
|
||||
)
|
||||
|
||||
|
||||
RESET = '\033[0m'
|
||||
|
||||
|
||||
def colored(text, color=None, on_color=None, attrs=None):
|
||||
"""Colorize text.
|
||||
|
||||
Available text colors:
|
||||
red, green, yellow, blue, magenta, cyan, white.
|
||||
|
||||
Available text highlights:
|
||||
on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
|
||||
|
||||
Available attributes:
|
||||
bold, dark, underline, blink, reverse, concealed.
|
||||
|
||||
Example:
|
||||
colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink'])
|
||||
colored('Hello, World!', 'green')
|
||||
"""
|
||||
if os.getenv('ANSI_COLORS_DISABLED') is None:
|
||||
fmt_str = '\033[%dm%s'
|
||||
if color is not None:
|
||||
text = fmt_str % (COLORS[color], text)
|
||||
|
||||
if on_color is not None:
|
||||
text = fmt_str % (HIGHLIGHTS[on_color], text)
|
||||
|
||||
if attrs is not None:
|
||||
for attr in attrs:
|
||||
text = fmt_str % (ATTRIBUTES[attr], text)
|
||||
|
||||
text += RESET
|
||||
return text
|
||||
|
||||
|
||||
def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
|
||||
"""Print colorize text.
|
||||
|
||||
It accepts arguments of print function.
|
||||
"""
|
||||
|
||||
print((colored(text, color, on_color, attrs)), **kwargs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print('Current terminal type: %s' % os.getenv('TERM'))
|
||||
print('Test basic colors:')
|
||||
cprint('Grey color', 'grey')
|
||||
cprint('Red color', 'red')
|
||||
cprint('Green color', 'green')
|
||||
cprint('Yellow color', 'yellow')
|
||||
cprint('Blue color', 'blue')
|
||||
cprint('Magenta color', 'magenta')
|
||||
cprint('Cyan color', 'cyan')
|
||||
cprint('White color', 'white')
|
||||
print(('-' * 78))
|
||||
|
||||
print('Test highlights:')
|
||||
cprint('On grey color', on_color='on_grey')
|
||||
cprint('On red color', on_color='on_red')
|
||||
cprint('On green color', on_color='on_green')
|
||||
cprint('On yellow color', on_color='on_yellow')
|
||||
cprint('On blue color', on_color='on_blue')
|
||||
cprint('On magenta color', on_color='on_magenta')
|
||||
cprint('On cyan color', on_color='on_cyan')
|
||||
cprint('On white color', color='grey', on_color='on_white')
|
||||
print('-' * 78)
|
||||
|
||||
print('Test attributes:')
|
||||
cprint('Bold grey color', 'grey', attrs=['bold'])
|
||||
cprint('Dark red color', 'red', attrs=['dark'])
|
||||
cprint('Underline green color', 'green', attrs=['underline'])
|
||||
cprint('Blink yellow color', 'yellow', attrs=['blink'])
|
||||
cprint('Reversed blue color', 'blue', attrs=['reverse'])
|
||||
cprint('Concealed Magenta color', 'magenta', attrs=['concealed'])
|
||||
cprint('Bold underline reverse cyan color', 'cyan',
|
||||
attrs=['bold', 'underline', 'reverse'])
|
||||
cprint('Dark blink concealed white color', 'white',
|
||||
attrs=['dark', 'blink', 'concealed'])
|
||||
print(('-' * 78))
|
||||
|
||||
print('Test mixing:')
|
||||
cprint('Underline red on grey color', 'red', 'on_grey',
|
||||
['underline'])
|
||||
cprint('Reversed green on red color', 'green', 'on_red', ['reverse'])
|
||||
|
||||
@@ -143,6 +143,8 @@ upgrades were available. Use `pacman -Que` instead.
|
||||
| yaupd | `yaourt -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| yaupg | `yaourt -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| yasu | `yaourt -Syua --no-confirm` | Same as `yaupg`, but without confirmation |
|
||||
| yaclun | `yaourt -Yc` | Remove unneeded installed packages |
|
||||
| yaclf | `yaourt -Scc` | Remove cache directories |
|
||||
| upgrade[²](#f2) | `yaourt -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
#### Yay[¹](#f1)
|
||||
@@ -165,6 +167,8 @@ upgrades were available. Use `pacman -Que` instead.
|
||||
| yaupd | `yay -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| yaupg | `yay -Syu` | Sync with repositories before upgrading packages |
|
||||
| yasu | `yay -Syu --no-confirm` | Same as `yaupg`, but without confirmation |
|
||||
| yaclun | `yay -Yc` | Remove unneeded installed packages |
|
||||
| yaclf | `yay -Scc` | Remove cache directories |
|
||||
| upgrade[²](#f2) | `yay -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
---
|
||||
|
||||
@@ -170,6 +170,8 @@ if (( $+commands[yaourt] )); then
|
||||
alias yainsd='yaourt -S --asdeps'
|
||||
alias yamir='yaourt -Syy'
|
||||
alias yaupd="yaourt -Sy"
|
||||
alias yaclun='yaourt -Yc'
|
||||
alias yaclf='yaourt -Scc'
|
||||
alias upgrade='yaourt -Syu'
|
||||
fi
|
||||
|
||||
@@ -190,6 +192,8 @@ if (( $+commands[yay] )); then
|
||||
alias yainsd='yay -S --asdeps'
|
||||
alias yamir='yay -Syy'
|
||||
alias yaupd="yay -Sy"
|
||||
alias yaclun='yay -Yc'
|
||||
alias yaclf='yay -Scc'
|
||||
alias upgrade='yay -Syu'
|
||||
fi
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ autojump_paths=(
|
||||
/usr/local/share/autojump/autojump.zsh # FreeBSD installation
|
||||
/opt/local/etc/profile.d/autojump.sh # macOS with MacPorts
|
||||
/usr/local/etc/profile.d/autojump.sh # macOS with Homebrew (default)
|
||||
/opt/homebrew/etc/profile.d/autojump.sh # macOS with Homebrew (default on M1 macs)
|
||||
)
|
||||
|
||||
for file in $autojump_paths; do
|
||||
|
||||
@@ -62,47 +62,47 @@ function acp() {
|
||||
read -r sess_duration
|
||||
fi
|
||||
mfa_opt=(--serial-number "$mfa_serial" --token-code "$mfa_token" --duration-seconds "${sess_duration:-3600}")
|
||||
fi
|
||||
|
||||
# Now see whether we need to just MFA for the current role, or assume a different one
|
||||
local role_arn="$(aws configure get role_arn --profile $profile)"
|
||||
local sess_name="$(aws configure get role_session_name --profile $profile)"
|
||||
# Now see whether we need to just MFA for the current role, or assume a different one
|
||||
local role_arn="$(aws configure get role_arn --profile $profile)"
|
||||
local sess_name="$(aws configure get role_session_name --profile $profile)"
|
||||
|
||||
if [[ -n "$role_arn" ]]; then
|
||||
# Means we need to assume a specified role
|
||||
aws_command=(aws sts assume-role --role-arn "$role_arn" "${mfa_opt[@]}")
|
||||
if [[ -n "$role_arn" ]]; then
|
||||
# Means we need to assume a specified role
|
||||
aws_command=(aws sts assume-role --role-arn "$role_arn" "${mfa_opt[@]}")
|
||||
|
||||
# Check whether external_id is configured to use while assuming the role
|
||||
local external_id="$(aws configure get external_id --profile $profile)"
|
||||
if [[ -n "$external_id" ]]; then
|
||||
aws_command+=(--external-id "$external_id")
|
||||
fi
|
||||
|
||||
# Get source profile to use to assume role
|
||||
local source_profile="$(aws configure get source_profile --profile $profile)"
|
||||
if [[ -z "$sess_name" ]]; then
|
||||
sess_name="${source_profile:-profile}"
|
||||
fi
|
||||
aws_command+=(--profile="${source_profile:-profile}" --role-session-name "${sess_name}")
|
||||
|
||||
echo "Assuming role $role_arn using profile ${source_profile:-profile}"
|
||||
else
|
||||
# Means we only need to do MFA
|
||||
aws_command=(aws sts get-session-token --profile="$profile" "${mfa_opt[@]}")
|
||||
echo "Obtaining session token for profile $profile"
|
||||
# Check whether external_id is configured to use while assuming the role
|
||||
local external_id="$(aws configure get external_id --profile $profile)"
|
||||
if [[ -n "$external_id" ]]; then
|
||||
aws_command+=(--external-id "$external_id")
|
||||
fi
|
||||
|
||||
# Format output of aws command for easier processing
|
||||
aws_command+=(--query '[Credentials.AccessKeyId,Credentials.SecretAccessKey,Credentials.SessionToken]' --output text)
|
||||
|
||||
# Run the aws command to obtain credentials
|
||||
local -a credentials
|
||||
credentials=(${(ps:\t:)"$(${aws_command[@]})"})
|
||||
|
||||
if [[ -n "$credentials" ]]; then
|
||||
aws_access_key_id="${credentials[1]}"
|
||||
aws_secret_access_key="${credentials[2]}"
|
||||
aws_session_token="${credentials[3]}"
|
||||
# Get source profile to use to assume role
|
||||
local source_profile="$(aws configure get source_profile --profile $profile)"
|
||||
if [[ -z "$sess_name" ]]; then
|
||||
sess_name="${source_profile:-profile}"
|
||||
fi
|
||||
aws_command+=(--profile="${source_profile:-profile}" --role-session-name "${sess_name}")
|
||||
|
||||
echo "Assuming role $role_arn using profile ${source_profile:-profile}"
|
||||
else
|
||||
# Means we only need to do MFA
|
||||
aws_command=(aws sts get-session-token --profile="$profile" "${mfa_opt[@]}")
|
||||
echo "Obtaining session token for profile $profile"
|
||||
fi
|
||||
|
||||
# Format output of aws command for easier processing
|
||||
aws_command+=(--query '[Credentials.AccessKeyId,Credentials.SecretAccessKey,Credentials.SessionToken]' --output text)
|
||||
|
||||
# Run the aws command to obtain credentials
|
||||
local -a credentials
|
||||
credentials=(${(ps:\t:)"$(${aws_command[@]})"})
|
||||
|
||||
if [[ -n "$credentials" ]]; then
|
||||
aws_access_key_id="${credentials[1]}"
|
||||
aws_secret_access_key="${credentials[2]}"
|
||||
aws_session_token="${credentials[3]}"
|
||||
fi
|
||||
|
||||
# Switch to AWS profile
|
||||
|
||||
5
zsh/plugins/bedtools/README.md
Normal file
@@ -0,0 +1,5 @@
# Bedtools plugin

This plugin adds support for the [bedtools suite](http://bedtools.readthedocs.org/en/latest/):

* Adds autocomplete options for all bedtools subcommands.
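The README doesn't spell it out, but enabling the plugin presumably follows the same pattern as the other plugins in this commit; a minimal sketch for `~/.zshrc`:

```zsh
plugins=(... bedtools)
```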
64
zsh/plugins/bedtools/_bedtools
Normal file
@@ -0,0 +1,64 @@
|
||||
#compdef bedtools
|
||||
#autoload
|
||||
|
||||
local curcontext="$curcontext" state line ret=1
|
||||
local -a _files
|
||||
|
||||
_arguments -C \
|
||||
'1: :->cmds' \
|
||||
'2:: :->args' && ret=0
|
||||
|
||||
case $state in
|
||||
cmds)
|
||||
_values "bedtools command" \
|
||||
"--contact[Feature requests, bugs, mailing lists, etc.]" \
|
||||
"--help[Print this help menu.]" \
|
||||
"--version[What version of bedtools are you using?.]" \
|
||||
"annotate[Annotate coverage of features from multiple files.]" \
|
||||
"bamtobed[Convert BAM alignments to BED (& other) formats.]" \
|
||||
"bamtofastq[Convert BAM records to FASTQ records.]" \
|
||||
"bed12tobed6[Breaks BED12 intervals into discrete BED6 intervals.]" \
|
||||
"bedpetobam[Convert BEDPE intervals to BAM records.]" \
|
||||
"bedtobam[Convert intervals to BAM records.]" \
|
||||
"closest[Find the closest, potentially non-overlapping interval.]" \
|
||||
"cluster[Cluster (but don't merge) overlapping/nearby intervals.]" \
|
||||
"complement[Extract intervals _not_ represented by an interval file.]" \
|
||||
"coverage[Compute the coverage over defined intervals.]" \
|
||||
"expand[Replicate lines based on lists of values in columns.]" \
|
||||
"fisher[Calculate Fisher statistic b/w two feature files.]" \
|
||||
"flank[Create new intervals from the flanks of existing intervals.]" \
|
||||
"genomecov[Compute the coverage over an entire genome.]" \
|
||||
"getfasta[Use intervals to extract sequences from a FASTA file.]" \
|
||||
"groupby[Group by common cols. & summarize oth. cols. (~ SQL "groupBy")]" \
|
||||
"igv[Create an IGV snapshot batch script.]" \
|
||||
"intersect[Find overlapping intervals in various ways.]" \
|
||||
"jaccard[Calculate the Jaccard statistic b/w two sets of intervals.]" \
|
||||
"links[Create a HTML page of links to UCSC locations.]" \
|
||||
"makewindows[Make interval "windows" across a genome.]" \
|
||||
"map[Apply a function to a column for each overlapping interval.]" \
|
||||
"maskfasta[Use intervals to mask sequences from a FASTA file.]" \
|
||||
"merge[Combine overlapping/nearby intervals into a single interval.]" \
|
||||
"multicov[Counts coverage from multiple BAMs at specific intervals.]" \
|
||||
"multiinter[Identifies common intervals among multiple interval files.]" \
|
||||
"nuc[Profile the nucleotide content of intervals in a FASTA file.]" \
|
||||
"overlap[Computes the amount of overlap from two intervals.]" \
|
||||
"pairtobed[Find pairs that overlap intervals in various ways.]" \
|
||||
"pairtopair[Find pairs that overlap other pairs in various ways.]" \
|
||||
"random[Generate random intervals in a genome.]" \
|
||||
"reldist[Calculate the distribution of relative distances b/w two files.]" \
|
||||
"sample[Sample random records from file using reservoir sampling.]" \
|
||||
"shuffle[Randomly redistrubute intervals in a genome.]" \
|
||||
"slop[Adjust the size of intervals.]" \
|
||||
"sort[Order the intervals in a file.]" \
|
||||
"subtract[Remove intervals based on overlaps b/w two files.]" \
|
||||
"tag[Tag BAM alignments based on overlaps with interval files.]" \
|
||||
"unionbedg[Combines coverage intervals from multiple BEDGRAPH files.]" \
|
||||
"window[Find overlapping intervals within a window around an interval.]" \
|
||||
ret=0
|
||||
;;
|
||||
*)
|
||||
_files
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
@@ -1,4 +1,6 @@
|
||||
alias bi="bower install"
|
||||
alias bisd="bower install --save-dev"
|
||||
alias bis="bower install --save"
|
||||
alias bl="bower list"
|
||||
alias bs="bower search"
|
||||
|
||||
|
||||
@@ -1,407 +0,0 @@
|
||||
#compdef cargo
|
||||
|
||||
autoload -U regexp-replace
|
||||
|
||||
_cargo() {
|
||||
local curcontext="$curcontext" ret=1
|
||||
local -a command_scope_spec common parallel features msgfmt triple target registry
|
||||
local -a state line state_descr # These are set by _arguments
|
||||
typeset -A opt_args
|
||||
|
||||
common=(
|
||||
'(-q --quiet)*'{-v,--verbose}'[use verbose output]'
|
||||
'(-q --quiet -v --verbose)'{-q,--quiet}'[no output printed to stdout]'
|
||||
'-Z+[pass unstable (nightly-only) flags to cargo]: :_cargo_unstable_flags'
|
||||
'--frozen[require that Cargo.lock and cache are up to date]'
|
||||
'--locked[require that Cargo.lock is up to date]'
|
||||
'--color=[specify colorization option]:coloring:(auto always never)'
|
||||
'(- 1 *)'{-h,--help}'[show help message]'
|
||||
)
|
||||
|
||||
# leading items in parentheses are an exclusion list for the arguments following that arg
|
||||
# See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions
|
||||
# - => exclude all other options
|
||||
# 1 => exclude positional arg 1
|
||||
# * => exclude all other args
|
||||
# +blah => exclude +blah
|
||||
_arguments -s -S -C $common \
|
||||
'(- 1 *)--list[list installed commands]' \
|
||||
'(- 1 *)--explain=[provide a detailed explanation of an error message]:error code' \
|
||||
'(- 1 *)'{-V,--version}'[show version information]' \
|
||||
'(+beta +nightly)+stable[use the stable toolchain]' \
|
||||
'(+stable +nightly)+beta[use the beta toolchain]' \
|
||||
'(+stable +beta)+nightly[use the nightly toolchain]' \
|
||||
'1: :_cargo_cmds' \
|
||||
'*:: :->args'
|
||||
|
||||
# These flags are mutually exclusive specifiers for the scope of a command; as
|
||||
# they are used in multiple places without change, they are expanded into the
|
||||
# appropriate command's `_arguments` where appropriate.
|
||||
command_scope_spec=(
|
||||
'(--bin --example --test --lib)--bench=[specify benchmark name]: :_cargo_benchmark_names'
|
||||
'(--bench --bin --test --lib)--example=[specify example name]:example name'
|
||||
'(--bench --example --test --lib)--bin=[specify binary name]:binary name'
|
||||
'(--bench --bin --example --test)--lib=[specify library name]:library name'
|
||||
'(--bench --bin --example --lib)--test=[specify test name]:test name'
|
||||
)
|
||||
|
||||
parallel=(
|
||||
'(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]'
|
||||
)
|
||||
|
||||
features=(
|
||||
'(--all-features)--features=[specify features to activate]:feature'
|
||||
'(--features)--all-features[activate all available features]'
|
||||
"--no-default-features[don't build the default features]"
|
||||
)
|
||||
|
||||
msgfmt='--message-format=[specify error format]:error format [human]:(human json short)'
|
||||
triple='--target=[specify target triple]:target triple'
|
||||
target='--target-dir=[specify directory for all generated artifacts]:directory:_directories'
|
||||
manifest='--manifest-path=[specify path to manifest]:path:_directories'
|
||||
registry='--registry=[specify registry to use]:registry'
|
||||
|
||||
case $state in
|
||||
args)
|
||||
curcontext="${curcontext%:*}-${words[1]}:"
|
||||
case ${words[1]} in
|
||||
bench)
|
||||
_arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
"${command_scope_spec[@]}" \
|
||||
'--all-targets[benchmark all targets]' \
|
||||
"--no-run[compile but don't run]" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to run benchmarks for]:package:_cargo_package_names' \
|
||||
'--exclude=[exclude packages from the benchmark]:spec' \
|
||||
'--no-fail-fast[run all benchmarks regardless of failure]' \
|
||||
'1: :_guard "^-*" "bench name"' \
|
||||
'*:args:_default'
|
||||
;;
|
||||
|
||||
build|b)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
|
||||
'--release[build in release mode]' \
|
||||
'--build-plan[output the build plan in JSON]' \
|
||||
;;
|
||||
|
||||
check|c)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to check]:package:_cargo_package_names' \
|
||||
'--release[check in release mode]' \
|
||||
;;
|
||||
|
||||
clean)
|
||||
_arguments -s -S $common $triple $target $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to clean]:package:_cargo_package_names' \
|
||||
'--release[clean release artifacts]' \
|
||||
'--doc[clean just the documentation directory]'
|
||||
;;
|
||||
|
||||
doc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--no-deps[do not build docs for dependencies]' \
|
||||
'--document-private-items[include non-public items in the documentation]' \
|
||||
'--open[open docs in browser after the build]' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
;;
|
||||
|
||||
fetch)
|
||||
_arguments -s -S $common $triple $manifest
|
||||
;;
|
||||
|
||||
fix)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
"${command_scope_spec[@]}" \
|
||||
'--broken-code[fix code even if it already has compiler errors]' \
|
||||
'--edition[fix in preparation for the next edition]' \
|
||||
'--edition-idioms[fix warnings to migrate to the idioms of an edition]' \
|
||||
'--allow-no-vcs[fix code even if a VCS was not detected]' \
|
||||
'--allow-dirty[fix code even if the working directory is dirty]' \
|
||||
'--allow-staged[fix code even if the working directory has staged changes]'
|
||||
;;
|
||||
|
||||
generate-lockfile)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
git-checkout)
|
||||
_arguments -s -S $common \
|
||||
'--reference=:reference' \
|
||||
'--url=:url:_urls'
|
||||
;;
|
||||
|
||||
help)
|
||||
_cargo_cmds
|
||||
;;
|
||||
|
||||
init)
|
||||
_arguments -s -S $common $registry \
|
||||
'--lib[use library template]' \
|
||||
'--edition=[specify edition to set for the crate generated]:edition:(2015 2018)' \
|
||||
'--vcs=[initialize a new repo with a given VCS]:vcs:(git hg pijul fossil none)' \
|
||||
'--name=[set the resulting package name]:name' \
|
||||
'1:path:_directories'
|
||||
;;
|
||||
|
||||
install)
|
||||
_arguments -s -S $common $parallel $features $triple $registry \
|
||||
'(-f --force)'{-f,--force}'[force overwriting of existing crates or binaries]' \
|
||||
'--bin=[only install the specified binary]:binary' \
|
||||
'--branch=[branch to use when installing from git]:branch' \
|
||||
'--debug[build in debug mode instead of release mode]' \
|
||||
'--example=[install the specified example instead of binaries]:example' \
|
||||
'--git=[specify URL from which to install the crate]:url:_urls' \
|
||||
'--path=[local filesystem path to crate to install]: :_directories' \
|
||||
'--rev=[specific commit to use when installing from git]:commit' \
|
||||
'--root=[directory to install packages into]: :_directories' \
|
||||
'--tag=[tag to use when installing from git]:tag' \
|
||||
'--vers=[version to install from crates.io]:version' \
|
||||
'--list[list all installed packages and their versions]' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
|
||||
locate-project)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
login)
|
||||
_arguments -s -S $common $registry \
|
||||
'*: :_guard "^-*" "token"'
|
||||
;;
|
||||
|
||||
metadata)
|
||||
_arguments -s -S $common $features $manifest \
|
||||
"--no-deps[output information only about the root package and don't fetch dependencies]" \
|
||||
'--format-version=[specify format version]:version [1]:(1)'
|
||||
;;
|
||||
|
||||
new)
|
||||
_arguments -s -S $common $registry \
|
||||
'--lib[use library template]' \
|
||||
'--vcs:initialize a new repo with a given VCS:(git hg none)' \
|
||||
'--name=[set the resulting package name]'
|
||||
;;
|
||||
|
||||
owner)
|
||||
_arguments -s -S $common $registry \
|
||||
'(-a --add)'{-a,--add}'[specify name of a user or team to invite as an owner]:name' \
|
||||
'--index=[specify registry index]:index' \
|
||||
'(-l --list)'{-l,--list}'[list owners of a crate]' \
|
||||
'(-r --remove)'{-r,--remove}'[specify name of a user or team to remove as an owner]:name' \
|
||||
'--token=[specify API token to use when authenticating]:token' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
|
||||
package)
|
||||
_arguments -s -S $common $parallel $features $triple $target $manifest \
|
||||
'(-l --list)'{-l,--list}'[print files included in a package without making one]' \
|
||||
'--no-metadata[ignore warnings about a lack of human-usable metadata]' \
|
||||
'--allow-dirty[allow dirty working directories to be packaged]' \
|
||||
"--no-verify[don't build to verify contents]"
|
||||
;;
|
||||
|
||||
pkgid)
|
||||
_arguments -s -S $common $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to get ID specifier for]:package:_cargo_package_names' \
|
||||
'*: :_guard "^-*" "spec"'
|
||||
;;
|
||||
|
||||
publish)
|
||||
_arguments -s -S $common $parallel $features $triple $target $manifest $registry \
|
||||
'--index=[specify registry index]:index' \
|
||||
'--allow-dirty[allow dirty working directories to be packaged]' \
|
||||
"--no-verify[don't verify the contents by building them]" \
|
||||
'--token=[specify token to use when uploading]:token' \
|
||||
'--dry-run[perform all checks without uploading]'
|
||||
;;
|
||||
|
||||
read-manifest)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
run|r)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--example=[name of the bin target]:name' \
|
||||
'--bin=[name of the bin target]:name' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package with the target to run]:package:_cargo_package_names' \
|
||||
'--release[build in release mode]' \
|
||||
'*: :_default'
|
||||
;;
|
||||
|
||||
rustc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
|
||||
'--profile=[specify profile to build the selected target for]:profile' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'*: : _dispatch rustc rustc -default-'
|
||||
;;
|
||||
|
||||
rustdoc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--document-private-items[include non-public items in the documentation]' \
|
||||
'--open[open the docs in a browser after the operation]' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'*: : _dispatch rustdoc rustdoc -default-'
|
||||
;;
|
||||
|
||||
search)
|
||||
_arguments -s -S $common $registry \
|
||||
'--index=[specify registry index]:index' \
|
||||
'--limit=[limit the number of results]:results [10]' \
|
||||
'*: :_guard "^-*" "query"'
|
||||
;;
|
||||
|
||||
test|t)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--test=[test name]: :_cargo_test_names' \
|
||||
'--no-fail-fast[run all tests regardless of failure]' \
|
||||
'--no-run[compile but do not run]' \
|
||||
'(-p --package)'{-p+,--package=}'[package to run tests for]:package:_cargo_package_names' \
|
||||
'--all[test all packages in the workspace]' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
'1: :_cargo_test_names' \
|
||||
'(--doc --bin --example --test --bench)--lib[only test library]' \
|
||||
'(--lib --bin --example --test --bench)--doc[only test documentation]' \
|
||||
'(--lib --doc --example --test --bench)--bin=[binary name]' \
|
||||
'(--lib --doc --bin --test --bench)--example=[example name]' \
|
||||
'(--lib --doc --bin --example --bench)--test=[test name]' \
|
||||
'(--lib --doc --bin --example --test)--bench=[benchmark name]' \
|
||||
'*: :_default'
|
||||
;;
|
||||
|
||||
uninstall)
|
||||
_arguments -s -S $common \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to uninstall]:package:_cargo_package_names' \
|
||||
'--bin=[only uninstall the specified binary]:name' \
|
||||
'--root=[directory to uninstall packages from]: :_files -/' \
|
||||
'*:crate:_cargo_installed_crates -F line'
|
||||
;;
|
||||
|
||||
update)
|
||||
_arguments -s -S $common $manifest \
|
||||
'--aggressive=[force dependency update]' \
|
||||
"--dry-run[don't actually write the lockfile]" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to update]:package:_cargo_package_names' \
|
||||
'--precise=[update single dependency to precise release]:release'
|
||||
;;
|
||||
|
||||
verify-project)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
version)
|
||||
_arguments -s -S $common
|
||||
;;
|
||||
|
||||
yank)
|
||||
_arguments -s -S $common $registry \
|
||||
'--vers=[specify yank version]:version' \
|
||||
'--undo[undo a yank, putting a version back into the index]' \
|
||||
'--index=[specify registry index to yank from]:registry index' \
|
||||
'--token=[specify API token to use when authenticating]:token' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
*)
|
||||
# allow plugins to define their own functions
|
||||
if ! _call_function ret _cargo-${words[1]}; then
|
||||
# fallback on default completion for unknown commands
|
||||
_default && ret=0
|
||||
fi
|
||||
(( ! ret ))
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_cargo_unstable_flags() {
|
||||
local flags
|
||||
flags=( help ${${${(M)${(f)"$(_call_program flags cargo -Z help)"}:#*--*}/ #-- #/:}##*-Z } )
|
||||
_describe -t flags 'unstable flag' flags
|
||||
}
|
||||
|
||||
_cargo_installed_crates() {
|
||||
local expl
|
||||
_description crates expl 'crate'
|
||||
compadd "$@" "$expl[@]" - ${${${(f)"$(cargo install --list)"}:# *}%% *}
|
||||
}
|
||||
|
||||
_cargo_cmds() {
|
||||
local -a commands
|
||||
# This uses Parameter Expansion Flags, which are a built-in Zsh feature.
|
||||
# See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
|
||||
# and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion
|
||||
#
|
||||
# # How does this work?
|
||||
#
|
||||
# First it splits the result of `cargo --list` at newline, then it removes the first line.
|
||||
# Then it removes the indentation (4 spaces) before each item. (Note the x## pattern [1]).
|
||||
# Then it replaces those spaces between item and description with a `:`
|
||||
#
|
||||
# [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns
|
||||
commands=( ${${${(M)"${(f)$(_call_program commands cargo --list)}":# *}/ ##/}/ ##/:} )
|
||||
_describe -t commands 'command' commands
|
||||
}
|
||||
|
||||
|
||||
#FIXME: Disabled until fixed
|
||||
#gets package names from the manifest file
|
||||
_cargo_package_names() {
|
||||
_message -e packages package
|
||||
}
|
||||
|
||||
# Extracts the values of "name" from the array given in $1 and shows them as
|
||||
# command line options for completion
|
||||
_cargo_names_from_array() {
|
||||
# strip json from the path
|
||||
local manifest=${${${"$(cargo locate-project)"}%\"\}}##*\"}
|
||||
if [[ -z $manifest ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
local last_line
|
||||
local -a names;
|
||||
local in_block=false
|
||||
local block_name=$1
|
||||
names=()
|
||||
while read -r line; do
|
||||
if [[ $last_line == "[[$block_name]]" ]]; then
|
||||
in_block=true
|
||||
else
|
||||
if [[ $last_line =~ '\s*\[\[.*' ]]; then
|
||||
in_block=false
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $in_block == true ]]; then
|
||||
if [[ $line =~ '\s*name\s*=' ]]; then
|
||||
regexp-replace line '^\s*name\s*=\s*|"' ''
|
||||
names+=( "$line" )
|
||||
fi
|
||||
fi
|
||||
|
||||
last_line=$line
|
||||
done < "$manifest"
|
||||
_describe "$block_name" names
|
||||
|
||||
}
|
||||
|
||||
#Gets the test names from the manifest file
|
||||
_cargo_test_names() {
|
||||
_cargo_names_from_array "test"
|
||||
}
|
||||
|
||||
#Gets the bench names from the manifest file
|
||||
_cargo_benchmark_names() {
|
||||
_cargo_names_from_array "bench"
|
||||
}
|
||||
|
||||
_cargo
|
||||
23
zsh/plugins/cargo/cargo.plugin.zsh
Normal file
@@ -0,0 +1,23 @@
if (( $+commands[rustup] && $+commands[cargo] )); then
  # remove old generated completion file
  command rm -f "${0:A:h}/_cargo"

  # generate new completion file
  ver="$(cargo --version)"
  ver_file="$ZSH_CACHE_DIR/cargo_version"
  comp_file="$ZSH_CACHE_DIR/completions/_cargo"

  mkdir -p "${comp_file:h}"
  (( ${fpath[(Ie)${comp_file:h}]} )) || fpath=("${comp_file:h}" $fpath)

  if [[ ! -f "$comp_file" || ! -f "$ver_file" || "$ver" != "$(< "$ver_file")" ]]; then
    rustup completions zsh cargo >| "$comp_file"
    echo "$ver" >| "$ver_file"
  fi

  declare -A _comps
  autoload -Uz _cargo
  _comps[cargo]=_cargo

  unset ver ver_file comp_file
fi
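A small usage note on the snippet above: the completion is only regenerated when the cached version string changes, so if the cached `_cargo` ever gets out of sync you can delete the two cache files (paths taken from the code) and restart the shell:

```zsh
# force the cargo completion to be regenerated on the next shell start
rm -f "$ZSH_CACHE_DIR/cargo_version" "$ZSH_CACHE_DIR/completions/_cargo"
```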
@@ -37,7 +37,7 @@ _homebrew-installed() {
|
||||
}
|
||||
|
||||
_chruby-from-homebrew-installed() {
|
||||
[ -r _brew_prefix ] &> /dev/null
|
||||
[ -r $_brew_prefix ] &> /dev/null
|
||||
}
|
||||
|
||||
_ruby-build_installed() {
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
# CloudApp plugin
|
||||
|
||||
## The CloudApp API is deprecated, so the plugin will be removed shortly
|
||||
|
||||
[CloudApp](https://www.getcloudapp.com) brings screen recording, screenshots, and GIF creation to the cloud, in an easy-to-use enterprise-level app. The CloudApp plugin allows you to upload a file to your CloadApp account from the command line.
|
||||
|
||||
To use it, add `cloudapp` to the plugins array of your `~/.zshrc` file:
|
||||
|
||||
```zsh
|
||||
plugins=(... cloudapp)
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
1. [Aaron Russell's `cloudapp_api` gem](https://github.com/aaronrussell/cloudapp_api#installation)
|
||||
|
||||
2. That you set your CloudApp credentials in `~/.cloudapp` as a simple text file like below:
|
||||
```
|
||||
email
|
||||
password
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- `cloudapp <filename>`: uploads `<filename>` to your CloudApp account, and if you're using
|
||||
macOS, copies the URL to your clipboard.
|
||||
@@ -1,4 +0,0 @@
|
||||
print -Pn "%F{yellow}"
|
||||
print "[oh-my-zsh] The CloudApp API no longer works, so the cloudapp plugin will"
|
||||
print "[oh-my-zsh] be removed shortly. Please remove it from your plugins list."
|
||||
print -Pn "%f"
|
||||
1
zsh/plugins/colemak/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.less
|
||||
@@ -19,4 +19,15 @@ bindkey -a 'N' vi-join
|
||||
bindkey -a 'j' vi-forward-word-end
|
||||
bindkey -a 'J' vi-forward-blank-word-end
|
||||
|
||||
lesskey $ZSH/plugins/colemak/colemak-less
|
||||
# New less versions will read this file directly
|
||||
export LESSKEYIN="${0:h:A}/colemak-less"
|
||||
|
||||
# Only run lesskey if less version is older than v582
|
||||
less_ver=$(less --version | awk '{print $2;exit}')
|
||||
autoload -Uz is-at-least
|
||||
if ! is-at-least 582 $less_ver; then
|
||||
# Old less versions will read this transformed file
|
||||
export LESSKEY="${0:h:A}/.less"
|
||||
lesskey -o "$LESSKEY" "$LESSKEYIN" 2>/dev/null
|
||||
fi
|
||||
unset less_ver
|
||||
|
||||
@@ -5,10 +5,12 @@ copybuffer () {
|
||||
if which clipcopy &>/dev/null; then
|
||||
printf "%s" "$BUFFER" | clipcopy
|
||||
else
|
||||
echo "clipcopy function not found. Please make sure you have Oh My Zsh installed correctly."
|
||||
zle -M "clipcopy not found. Please make sure you have Oh My Zsh installed correctly."
|
||||
fi
|
||||
}
|
||||
|
||||
zle -N copybuffer
|
||||
|
||||
bindkey "^O" copybuffer
|
||||
bindkey -M emacs "^O" copybuffer
|
||||
bindkey -M viins "^O" copybuffer
|
||||
bindkey -M vicmd "^O" copybuffer
|
||||
|
||||
@@ -25,7 +25,7 @@ The enabled options for rsync are:
|
||||
|
||||
* `-hhh`: outputs numbers in human-readable format, in units of 1024 (K, M, G, T).
|
||||
|
||||
* `--backup-dir=/tmp/rsync`: move backup copies to "/tmp/rsync".
|
||||
* `--backup-dir="/tmp/rsync-$USERNAME"`: move backup copies to "/tmp/rsync-$USERNAME".
|
||||
|
||||
* `-e /dev/null`: only work on local files (disable remote shells).
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
cpv() {
|
||||
rsync -pogbr -hhh --backup-dir=/tmp/rsync -e /dev/null --progress "$@"
|
||||
rsync -pogbr -hhh --backup-dir="/tmp/rsync-${USERNAME}" -e /dev/null --progress "$@"
|
||||
}
|
||||
compdef _files cpv
|
||||
|
||||
@@ -10,7 +10,7 @@ plugins=(... debian)
|
||||
|
||||
## Settings
|
||||
|
||||
- `$apt_pref`: use apt or aptitude if installed, fallback is apt-get.
|
||||
- `$apt_pref`: use aptitude or apt if installed, fallback is apt-get.
|
||||
- `$apt_upgr`: use upgrade or safe-upgrade (for aptitude).
|
||||
|
||||
Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh My Zsh) to override this behavior.
|
||||
|
||||
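To pin the commands yourself, a minimal sketch of the override described above (the values shown are only examples) — set both variables in `~/.zshrc` before Oh My Zsh is sourced:

```zsh
# ~/.zshrc, before `source $ZSH/oh-my-zsh.sh`
apt_pref='apt-get'
apt_upgr='upgrade'
```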
@@ -1,13 +1,13 @@
|
||||
# Use apt or aptitude if installed, fallback is apt-get
|
||||
# Use aptitude or apt if installed, fallback is apt-get
|
||||
# You can just set apt_pref='apt-get' to override it.
|
||||
|
||||
if [[ -z $apt_pref || -z $apt_upgr ]]; then
|
||||
if [[ -e $commands[apt] ]]; then
|
||||
apt_pref='apt'
|
||||
apt_upgr='upgrade'
|
||||
elif [[ -e $commands[aptitude] ]]; then
|
||||
if [[ -e $commands[aptitude] ]]; then
|
||||
apt_pref='aptitude'
|
||||
apt_upgr='safe-upgrade'
|
||||
elif [[ -e $commands[apt] ]]; then
|
||||
apt_pref='apt'
|
||||
apt_upgr='upgrade'
|
||||
else
|
||||
apt_pref='apt-get'
|
||||
apt_upgr='upgrade'
|
||||
|
||||
1
zsh/plugins/deno/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
_deno
|
||||
@@ -12,12 +12,24 @@ alias dup='deno upgrade'
|
||||
|
||||
# COMPLETION FUNCTION
|
||||
if (( $+commands[deno] )); then
|
||||
if [[ ! -f $ZSH_CACHE_DIR/deno_version ]] \
|
||||
|| [[ "$(deno --version)" != "$(< "$ZSH_CACHE_DIR/deno_version")" ]] \
|
||||
|| [[ ! -f $ZSH/plugins/deno/_deno ]]; then
|
||||
deno completions zsh > $ZSH/plugins/deno/_deno
|
||||
deno --version > $ZSH_CACHE_DIR/deno_version
|
||||
# remove old generated completion file
|
||||
command rm -f "${0:A:h}/_deno"
|
||||
|
||||
ver="$(deno --version)"
|
||||
ver_file="$ZSH_CACHE_DIR/deno_version"
|
||||
comp_file="$ZSH_CACHE_DIR/completions/_deno"
|
||||
|
||||
mkdir -p "${comp_file:h}"
|
||||
(( ${fpath[(Ie)${comp_file:h}]} )) || fpath=("${comp_file:h}" $fpath)
|
||||
|
||||
if [[ ! -f "$comp_file" || ! -f "$ver_file" || "$ver" != "$(< "$ver_file")" ]]; then
|
||||
deno completions zsh >| "$comp_file"
|
||||
echo "$ver" >| "$ver_file"
|
||||
fi
|
||||
|
||||
declare -A _comps
|
||||
autoload -Uz _deno
|
||||
_comps[deno]=_deno
|
||||
|
||||
unset ver ver_file comp_file
|
||||
fi
|
||||
|
||||
@@ -17,6 +17,9 @@ plugins=(... dirhistory)
|
||||
| <kbd>alt</kbd> + <kbd>up</kbd> | Move into the parent directory |
|
||||
| <kbd>alt</kbd> + <kbd>down</kbd> | Move into the first child directory by alphabetical order |
|
||||
|
||||
NOTE: some terminals might override the ALT+Arrow key bindings (Windows Terminal, for example).
If these don't work, check your terminal settings and change them to a different keyboard shortcut.
|
||||
|
||||
## Usage
|
||||
|
||||
This plugin allows you to navigate the history of previous current working directories using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT. Mac users may alternatively use OPT-LEFT and OPT-RIGHT.
|
||||
|
||||
@@ -1343,7 +1343,7 @@ __docker_node_complete_ls_filters() {
|
||||
;;
|
||||
esac
|
||||
else
|
||||
opts=('id' 'label' 'membership' 'name' 'role')
|
||||
opts=('id' 'label' 'membership' 'name' 'node.label' 'role')
|
||||
_describe -t filter-opts "filter options" opts -qS "=" && ret=0
|
||||
fi
|
||||
|
||||
@@ -2544,6 +2544,82 @@ __docker_volume_subcommand() {
|
||||
|
||||
# EO volume
|
||||
|
||||
# BO context
|
||||
|
||||
__docker_complete_contexts() {
|
||||
[[ $PREFIX = -* ]] && return 1
|
||||
integer ret=1
|
||||
declare -a contexts
|
||||
|
||||
contexts=(${(f)${:-"$(_call_program commands docker $docker_options context ls -q)"$'\n'}})
|
||||
|
||||
_describe -t context-list "context" contexts && ret=0
|
||||
return ret
|
||||
}
|
||||
|
||||
__docker_context_commands() {
|
||||
local -a _docker_context_subcommands
|
||||
_docker_context_subcommands=(
|
||||
"create:Create new context"
|
||||
"inspect:Display detailed information on one or more contexts"
|
||||
"list:List available contexts"
|
||||
"rm:Remove one or more contexts"
|
||||
"show:Print the current context"
|
||||
"update:Update a context"
|
||||
"use:Set the default context"
|
||||
)
|
||||
_describe -t docker-context-commands "docker context command" _docker_context_subcommands
|
||||
}
|
||||
|
||||
__docker_context_subcommand() {
|
||||
local -a _command_args opts_help
|
||||
local expl help="--help"
|
||||
integer ret=1
|
||||
|
||||
opts_help=("(: -)--help[Print usage]")
|
||||
|
||||
case "$words[1]" in
|
||||
(create)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help)--default-stack-orchestrator=[Default orchestrator for stack operations to use with this context]:default-stack-orchestrator:(swarm kubernetes all)" \
|
||||
"($help)--description=[Description of the context]:description:" \
|
||||
"($help)--docker=[Set the docker endpoint]:docker:" \
|
||||
"($help)--kubernetes=[Set the kubernetes endpoint]:kubernetes:" \
|
||||
"($help)--from=[Create context from a named context]:from:__docker_complete_contexts" \
|
||||
"($help -):name: " && ret=0
|
||||
;;
|
||||
(use)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(inspect)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(rm)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(update)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help)--default-stack-orchestrator=[Default orchestrator for stack operations to use with this context]:default-stack-orchestrator:(swarm kubernetes all)" \
|
||||
"($help)--description=[Description of the context]:description:" \
|
||||
"($help)--docker=[Set the docker endpoint]:docker:" \
|
||||
"($help)--kubernetes=[Set the kubernetes endpoint]:kubernetes:" \
|
||||
"($help -):name:" && ret=0
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
# EO context
|
||||
|
||||
__docker_caching_policy() {
|
||||
oldp=( "$1"(Nmh+1) ) # 1 hour
|
||||
(( $#oldp ))
|
||||
@@ -2576,7 +2652,7 @@ __docker_commands() {
|
||||
then
|
||||
local -a lines
|
||||
lines=(${(f)"$(_call_program commands docker 2>&1)"})
|
||||
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/ ##/:})
|
||||
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/\*# ##/:})
|
||||
_docker_subcommands=($_docker_subcommands 'daemon:Enable daemon mode' 'help:Show help for a command')
|
||||
(( $#_docker_subcommands > 2 )) && _store_cache docker_subcommands _docker_subcommands
|
||||
fi
|
||||
@@ -2631,6 +2707,23 @@ __docker_subcommand() {
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
(context)
|
||||
local curcontext="$curcontext" state
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -): :->command" \
|
||||
"($help -)*:: :->option-or-argument" && ret=0
|
||||
|
||||
case $state in
|
||||
(command)
|
||||
__docker_context_commands && ret=0
|
||||
;;
|
||||
(option-or-argument)
|
||||
curcontext=${curcontext%:*:*}:docker-${words[-1]}:
|
||||
__docker_context_subcommand && ret=0
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
(daemon)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
@@ -2698,7 +2791,8 @@ __docker_subcommand() {
|
||||
"($help)--tlsverify[Use TLS and verify the remote]" \
|
||||
"($help)--userns-remap=[User/Group setting for user namespaces]:user\:group:->users-groups" \
|
||||
"($help)--userland-proxy[Use userland proxy for loopback traffic]" \
|
||||
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" && ret=0
|
||||
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" \
|
||||
"($help)--validate[Validate daemon configuration and exit]" && ret=0
|
||||
|
||||
case $state in
|
||||
(cluster-store)
|
||||
|
||||
@@ -11,41 +11,51 @@
|
||||
## Functions
|
||||
|
||||
source_env() {
|
||||
if [[ -f $ZSH_DOTENV_FILE ]]; then
|
||||
if [[ "$ZSH_DOTENV_PROMPT" != false ]]; then
|
||||
local confirmation dirpath="${PWD:A}"
|
||||
if [[ ! -f "$ZSH_DOTENV_FILE" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
# make sure there is an (dis-)allowed file
|
||||
touch "$ZSH_DOTENV_ALLOWED_LIST"
|
||||
touch "$ZSH_DOTENV_DISALLOWED_LIST"
|
||||
if [[ "$ZSH_DOTENV_PROMPT" != false ]]; then
|
||||
local confirmation dirpath="${PWD:A}"
|
||||
|
||||
# early return if disallowed
|
||||
if grep -q "$dirpath" "$ZSH_DOTENV_DISALLOWED_LIST" &>/dev/null; then
|
||||
return;
|
||||
fi
|
||||
# make sure there is an (dis-)allowed file
|
||||
touch "$ZSH_DOTENV_ALLOWED_LIST"
|
||||
touch "$ZSH_DOTENV_DISALLOWED_LIST"
|
||||
|
||||
# check if current directory's .env file is allowed or ask for confirmation
|
||||
if ! grep -q "$dirpath" "$ZSH_DOTENV_ALLOWED_LIST" &>/dev/null; then
|
||||
# print same-line prompt and output newline character if necessary
|
||||
echo -n "dotenv: found '$ZSH_DOTENV_FILE' file. Source it? ([Y]es/[n]o/[a]lways/n[e]ver) "
|
||||
read -k 1 confirmation; [[ "$confirmation" != $'\n' ]] && echo
|
||||
|
||||
# check input
|
||||
case "$confirmation" in
|
||||
[nN]) return ;;
|
||||
[aA]) echo "$dirpath" >> "$ZSH_DOTENV_ALLOWED_LIST" ;;
|
||||
[eE]) echo "$dirpath" >> "$ZSH_DOTENV_DISALLOWED_LIST"; return ;;
|
||||
*) ;; # interpret anything else as a yes
|
||||
esac
|
||||
fi
|
||||
# early return if disallowed
|
||||
if command grep -q "$dirpath" "$ZSH_DOTENV_DISALLOWED_LIST" &>/dev/null; then
|
||||
return
|
||||
fi
|
||||
|
||||
# test .env syntax
|
||||
zsh -fn $ZSH_DOTENV_FILE || echo "dotenv: error when sourcing '$ZSH_DOTENV_FILE' file" >&2
|
||||
# check if current directory's .env file is allowed or ask for confirmation
|
||||
if ! command grep -q "$dirpath" "$ZSH_DOTENV_ALLOWED_LIST" &>/dev/null; then
|
||||
# get cursor column and print new line before prompt if not at line beginning
|
||||
local column
|
||||
echo -ne "\e[6n" > /dev/tty
|
||||
read -t 1 -s -d R column < /dev/tty
|
||||
column="${column##*\[*;}"
|
||||
[[ $column -eq 1 ]] || echo
|
||||
|
||||
setopt localoptions allexport
|
||||
source $ZSH_DOTENV_FILE
|
||||
# print same-line prompt and output newline character if necessary
|
||||
echo -n "dotenv: found '$ZSH_DOTENV_FILE' file. Source it? ([Y]es/[n]o/[a]lways/n[e]ver) "
|
||||
read -k 1 confirmation
|
||||
[[ "$confirmation" = $'\n' ]] || echo
|
||||
|
||||
# check input
|
||||
case "$confirmation" in
|
||||
[nN]) return ;;
|
||||
[aA]) echo "$dirpath" >> "$ZSH_DOTENV_ALLOWED_LIST" ;;
|
||||
[eE]) echo "$dirpath" >> "$ZSH_DOTENV_DISALLOWED_LIST"; return ;;
|
||||
*) ;; # interpret anything else as a yes
|
||||
esac
|
||||
fi
|
||||
fi
|
||||
|
||||
# test .env syntax
|
||||
zsh -fn $ZSH_DOTENV_FILE || echo "dotenv: error when sourcing '$ZSH_DOTENV_FILE' file" >&2
|
||||
|
||||
setopt localoptions allexport
|
||||
source $ZSH_DOTENV_FILE
|
||||
}
|
||||
|
||||
autoload -U add-zsh-hook
|
||||
|
||||
@@ -21,6 +21,8 @@ plugins=(... extract)
|
||||
| `apk` | Android app file |
|
||||
| `aar` | Android library file |
|
||||
| `bz2` | Bzip2 file |
|
||||
| `cab` | Microsoft cabinet archive |
|
||||
| `cpio` | Cpio archive |
|
||||
| `deb` | Debian package |
|
||||
| `ear` | Enterprise Application aRchive |
|
||||
| `gz` | Gzip file |
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
|
||||
_arguments \
|
||||
'(-r --remove)'{-r,--remove}'[Remove archive.]' \
|
||||
"*::archive file:_files -g '(#i)*.(7z|Z|apk|aar|bz2|deb|ear|gz|ipa|ipsw|jar|lrz|lz4|lzma|rar|rpm|sublime-package|tar|tar.bz2|tar.gz|tar.lrz|tar.lz|tar.lz4|tar.xz|tar.zma|tar.zst|tbz|tbz2|tgz|tlz|txz|tzst|war|whl|xpi|xz|zip|zst)(-.)'" \
|
||||
"*::archive file:_files -g '(#i)*.(7z|Z|apk|aar|bz2|cab|cpio|deb|ear|gz|ipa|ipsw|jar|lrz|lz4|lzma|rar|rpm|sublime-package|tar|tar.bz2|tar.gz|tar.lrz|tar.lz|tar.lz4|tar.xz|tar.zma|tar.zst|tbz|tbz2|tgz|tlz|txz|tzst|war|whl|xpi|xz|zip|zst)(-.)'" \
|
||||
&& return 0
|
||||
|
||||
@@ -1,82 +1,85 @@
|
||||
alias x=extract
|
||||
|
||||
extract() {
|
||||
local remove_archive
|
||||
local success
|
||||
local extract_dir
|
||||
setopt localoptions noautopushd
|
||||
|
||||
if (( $# == 0 )); then
|
||||
cat <<-'EOF' >&2
|
||||
Usage: extract [-option] [file ...]
|
||||
if (( $# == 0 )); then
|
||||
cat >&2 <<'EOF'
|
||||
Usage: extract [-option] [file ...]
|
||||
|
||||
Options:
|
||||
-r, --remove Remove archive after unpacking.
|
||||
EOF
|
||||
fi
|
||||
Options:
|
||||
-r, --remove Remove archive after unpacking.
|
||||
EOF
|
||||
fi
|
||||
|
||||
remove_archive=1
|
||||
if [[ "$1" == "-r" ]] || [[ "$1" == "--remove" ]]; then
|
||||
remove_archive=0
|
||||
shift
|
||||
fi
|
||||
local remove_archive=1
|
||||
if [[ "$1" == "-r" ]] || [[ "$1" == "--remove" ]]; then
|
||||
remove_archive=0
|
||||
shift
|
||||
fi
|
||||
|
||||
while (( $# > 0 )); do
|
||||
if [[ ! -f "$1" ]]; then
|
||||
echo "extract: '$1' is not a valid file" >&2
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
local pwd="$PWD"
|
||||
while (( $# > 0 )); do
|
||||
if [[ ! -f "$1" ]]; then
|
||||
echo "extract: '$1' is not a valid file" >&2
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
success=0
|
||||
extract_dir="${1:t:r}"
|
||||
case "${1:l}" in
|
||||
(*.tar.gz|*.tgz) (( $+commands[pigz] )) && { pigz -dc "$1" | tar xv } || tar zxvf "$1" ;;
|
||||
(*.tar.bz2|*.tbz|*.tbz2) tar xvjf "$1" ;;
|
||||
(*.tar.xz|*.txz)
|
||||
tar --xz --help &> /dev/null \
|
||||
&& tar --xz -xvf "$1" \
|
||||
|| xzcat "$1" | tar xvf - ;;
|
||||
(*.tar.zma|*.tlz)
|
||||
tar --lzma --help &> /dev/null \
|
||||
&& tar --lzma -xvf "$1" \
|
||||
|| lzcat "$1" | tar xvf - ;;
|
||||
(*.tar.zst|*.tzst)
|
||||
tar --zstd --help &> /dev/null \
|
||||
&& tar --zstd -xvf "$1" \
|
||||
|| zstdcat "$1" | tar xvf - ;;
|
||||
(*.tar) tar xvf "$1" ;;
|
||||
(*.tar.lz) (( $+commands[lzip] )) && tar xvf "$1" ;;
|
||||
(*.tar.lz4) lz4 -c -d "$1" | tar xvf - ;;
|
||||
(*.tar.lrz) (( $+commands[lrzuntar] )) && lrzuntar "$1" ;;
|
||||
(*.gz) (( $+commands[pigz] )) && pigz -dk "$1" || gunzip -k "$1" ;;
|
||||
(*.bz2) bunzip2 "$1" ;;
|
||||
(*.xz) unxz "$1" ;;
|
||||
(*.lrz) (( $+commands[lrunzip] )) && lrunzip "$1" ;;
|
||||
(*.lz4) lz4 -d "$1" ;;
|
||||
(*.lzma) unlzma "$1" ;;
|
||||
(*.z) uncompress "$1" ;;
|
||||
(*.zip|*.war|*.jar|*.ear|*.sublime-package|*.ipa|*.ipsw|*.xpi|*.apk|*.aar|*.whl) unzip "$1" -d $extract_dir ;;
|
||||
(*.rar) unrar x -ad "$1" ;;
|
||||
(*.rpm) mkdir "$extract_dir" && cd "$extract_dir" && rpm2cpio "../$1" | cpio --quiet -id && cd .. ;;
|
||||
(*.7z) 7za x "$1" ;;
|
||||
(*.deb)
|
||||
mkdir -p "$extract_dir/control"
|
||||
mkdir -p "$extract_dir/data"
|
||||
cd "$extract_dir"; ar vx "../${1}" > /dev/null
|
||||
cd control; tar xzvf ../control.tar.gz
|
||||
cd ../data; extract ../data.tar.*
|
||||
cd ..; rm *.tar.* debian-binary
|
||||
cd ..
|
||||
;;
|
||||
(*.zst) unzstd "$1" ;;
|
||||
(*)
|
||||
echo "extract: '$1' cannot be extracted" >&2
|
||||
success=1
|
||||
;;
|
||||
esac
|
||||
local success=0
|
||||
local extract_dir="${1:t:r}"
|
||||
local file="$1" full_path="${1:A}"
|
||||
case "${file:l}" in
|
||||
(*.tar.gz|*.tgz) (( $+commands[pigz] )) && { pigz -dc "$file" | tar xv } || tar zxvf "$file" ;;
|
||||
(*.tar.bz2|*.tbz|*.tbz2) tar xvjf "$file" ;;
|
||||
(*.tar.xz|*.txz)
|
||||
tar --xz --help &> /dev/null \
|
||||
&& tar --xz -xvf "$file" \
|
||||
|| xzcat "$file" | tar xvf - ;;
|
||||
(*.tar.zma|*.tlz)
|
||||
tar --lzma --help &> /dev/null \
|
||||
&& tar --lzma -xvf "$file" \
|
||||
|| lzcat "$file" | tar xvf - ;;
|
||||
(*.tar.zst|*.tzst)
|
||||
tar --zstd --help &> /dev/null \
|
||||
&& tar --zstd -xvf "$file" \
|
||||
|| zstdcat "$file" | tar xvf - ;;
|
||||
(*.tar) tar xvf "$file" ;;
|
||||
(*.tar.lz) (( $+commands[lzip] )) && tar xvf "$file" ;;
|
||||
(*.tar.lz4) lz4 -c -d "$file" | tar xvf - ;;
|
||||
(*.tar.lrz) (( $+commands[lrzuntar] )) && lrzuntar "$file" ;;
|
||||
(*.gz) (( $+commands[pigz] )) && pigz -dk "$file" || gunzip -k "$file" ;;
|
||||
(*.bz2) bunzip2 "$file" ;;
|
||||
(*.xz) unxz "$file" ;;
|
||||
(*.lrz) (( $+commands[lrunzip] )) && lrunzip "$file" ;;
|
||||
(*.lz4) lz4 -d "$file" ;;
|
||||
(*.lzma) unlzma "$file" ;;
|
||||
(*.z) uncompress "$file" ;;
|
||||
(*.zip|*.war|*.jar|*.ear|*.sublime-package|*.ipa|*.ipsw|*.xpi|*.apk|*.aar|*.whl) unzip "$file" -d "$extract_dir" ;;
|
||||
(*.rar) unrar x -ad "$file" ;;
|
||||
(*.rpm)
|
||||
command mkdir -p "$extract_dir" && builtin cd -q "$extract_dir" \
|
||||
&& rpm2cpio "$full_path" | cpio --quiet -id ;;
|
||||
(*.7z) 7za x "$file" ;;
|
||||
(*.deb)
|
||||
command mkdir -p "$extract_dir/control" "$extract_dir/data"
|
||||
builtin cd -q "$extract_dir"; ar vx "$full_path" > /dev/null
|
||||
builtin cd -q control; extract ../control.tar.*
|
||||
builtin cd -q ../data; extract ../data.tar.*
|
||||
builtin cd -q ..; command rm *.tar.* debian-binary ;;
|
||||
(*.zst) unzstd "$file" ;;
|
||||
(*.cab) cabextract -d "$extract_dir" "$file" ;;
|
||||
(*.cpio) cpio -idmvF "$file" ;;
|
||||
(*)
|
||||
echo "extract: '$file' cannot be extracted" >&2
|
||||
success=1 ;;
|
||||
esac
|
||||
|
||||
(( success = $success > 0 ? $success : $? ))
|
||||
(( $success == 0 )) && (( $remove_archive == 0 )) && rm "$1"
|
||||
shift
|
||||
done
|
||||
(( success = success > 0 ? success : $? ))
|
||||
(( success == 0 && remove_archive == 0 )) && rm "$full_path"
|
||||
shift
|
||||
|
||||
# Go back to original working directory in case we ran cd previously
|
||||
builtin cd -q "$pwd"
|
||||
done
|
||||
}
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
The fedora plugin is deprecated. Use the [dnf plugin](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/dnf) instead.
|
||||
@@ -1,3 +0,0 @@
|
||||
print -P "%F{yellow}The 'fedora' plugin is deprecated. Use the '%Udnf%u' plugin instead.%f"
|
||||
|
||||
source "$ZSH/plugins/dnf/dnf.plugin.zsh"
|
||||
9
zsh/plugins/fnm/README.md
Normal file
@@ -0,0 +1,9 @@
# fnm plugin

This plugin adds autocompletion for [fnm](https://github.com/Schniz/fnm) - a Node.js version manager.

To use it, add `fnm` to the plugins array in your zshrc file:

```zsh
plugins=(... fnm)
```
|
||||
23
zsh/plugins/fnm/fnm.plugin.zsh
Normal file
@@ -0,0 +1,23 @@
|
||||
if (( $+commands[fnm] )); then
|
||||
# remove old generated completion file
|
||||
command rm -f "${0:A:h}/_fnm"
|
||||
|
||||
ver="$(fnm --version)"
|
||||
ver_file="$ZSH_CACHE_DIR/fnm_version"
|
||||
comp_file="$ZSH_CACHE_DIR/completions/_fnm"
|
||||
|
||||
mkdir -p "${comp_file:h}"
|
||||
(( ${fpath[(Ie)${comp_file:h}]} )) || fpath=("${comp_file:h}" $fpath)
|
||||
|
||||
if [[ ! -f "$comp_file" || ! -f "$ver_file" || "$ver" != "$(< "$ver_file")" ]]; then
|
||||
fnm completions --shell=zsh >| "$comp_file"
|
||||
echo "$ver" >| "$ver_file"
|
||||
fi
|
||||
|
||||
declare -A _comps
|
||||
autoload -Uz _fnm
|
||||
_comps[fnm]=_fnm
|
||||
|
||||
unset ver ver_file comp_file
|
||||
fi
|
||||
|
||||
@@ -29,14 +29,10 @@ alias unheap='frontend unheap'
|
||||
alias vuejs='frontend vuejs'
|
||||
|
||||
function _frontend_fallback() {
|
||||
local url
|
||||
if [[ "$FRONTEND_SEARCH_FALLBACK" == duckduckgo ]]; then
|
||||
url="https://duckduckgo.com/?sites=$1&q="
|
||||
else
|
||||
url="https://google.com/search?as_sitesearch=$1&as_q="
|
||||
fi
|
||||
|
||||
echo "$url"
|
||||
case "$FRONTEND_SEARCH_FALLBACK" in
|
||||
duckduckgo) echo "https://duckduckgo.com/?sites=$1&q=" ;;
|
||||
*) echo "https://google.com/search?as_sitesearch=$1&as_q=" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
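# Usage sketch (illustrative): pick the fallback engine in ~/.zshrc with
#   FRONTEND_SEARCH_FALLBACK=duckduckgo
# any other value (or leaving it unset) falls through to the Google branch above.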
function frontend() {
|
||||
@@ -51,7 +47,7 @@ function frontend() {
|
||||
bootsnipp 'https://bootsnipp.com/search?q='
|
||||
bundlephobia 'https://bundlephobia.com/result?p='
|
||||
caniuse 'https://caniuse.com/#search='
|
||||
codepen 'https://codepen.io/search?q='
|
||||
codepen 'https://codepen.io/search/pens?q='
|
||||
compassdoc 'http://compass-style.org/search?q='
|
||||
cssflow 'http://www.cssflow.com/search?q='
|
||||
dartlang 'https://api.dartlang.org/apidocs/channels/stable/dartdoc-viewer/dart:'
|
||||
|
||||
@@ -7,6 +7,7 @@ if [[ -z "${CLOUDSDK_HOME}" ]]; then
|
||||
search_locations=(
|
||||
"$HOME/google-cloud-sdk"
|
||||
"/usr/local/Caskroom/google-cloud-sdk/latest/google-cloud-sdk"
|
||||
"/opt/homebrew/Caskroom/google-cloud-sdk/latest/google-cloud-sdk"
|
||||
"/usr/share/google-cloud-sdk"
|
||||
"/snap/google-cloud-sdk/current"
|
||||
"/usr/lib64/google-cloud-sdk/"
|
||||
|
||||
1
zsh/plugins/gh/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
_gh
|
||||
@@ -1,13 +1,24 @@
|
||||
# Autocompletion for the GitHub CLI (gh).
|
||||
|
||||
if (( $+commands[gh] )); then
|
||||
if [[ ! -r "$ZSH_CACHE_DIR/gh_version" \
|
||||
|| "$(gh --version)" != "$(< "$ZSH_CACHE_DIR/gh_version")"
|
||||
|| ! -f "$ZSH/plugins/gh/_gh" ]]; then
|
||||
gh completion --shell zsh > $ZSH/plugins/gh/_gh
|
||||
gh --version > $ZSH_CACHE_DIR/gh_version
|
||||
# remove old generated completion file
|
||||
command rm -f "${0:A:h}/_gh"
|
||||
|
||||
ver="$(gh --version)"
|
||||
ver_file="$ZSH_CACHE_DIR/gh_version"
|
||||
comp_file="$ZSH_CACHE_DIR/completions/_gh"
|
||||
|
||||
mkdir -p "${comp_file:h}"
|
||||
(( ${fpath[(Ie)${comp_file:h}]} )) || fpath=("${comp_file:h}" $fpath)
|
||||
|
||||
if [[ ! -f "$comp_file" || ! -f "$ver_file" || "$ver" != "$(< "$ver_file")" ]]; then
|
||||
gh completion --shell zsh >| "$comp_file"
|
||||
echo "$ver" >| "$ver_file"
|
||||
fi
|
||||
|
||||
declare -A _comps
|
||||
autoload -Uz _gh
|
||||
_comps[gh]=_gh
|
||||
|
||||
unset ver ver_file comp_file
|
||||
fi
|
||||
|
||||
|
||||
@@ -11,8 +11,9 @@ function git-fetch-all {
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Do nothing if auto-fetch disabled
|
||||
if [[ -z "$gitdir" || -f "$gitdir/NO_AUTO_FETCH" ]]; then
|
||||
# Do nothing if auto-fetch is disabled or don't have permissions
|
||||
if [[ ! -w "$gitdir" || -f "$gitdir/NO_AUTO_FETCH" ]] ||
|
||||
[[ -f "$gitdir/FETCH_LOG" && ! -w "$gitdir/FETCH_LOG" ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
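# Usage sketch (illustrative): auto-fetching can be disabled for a single repository
# by creating the marker file checked above, e.g. from inside that repository:
#   touch "$(git rev-parse --git-dir)/NO_AUTO_FETCH"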
@@ -24,8 +25,9 @@ function git-fetch-all {
|
||||
fi
|
||||
|
||||
# Fetch all remotes (avoid ssh passphrase prompt)
|
||||
date -R &>! "$gitdir/FETCH_LOG"
|
||||
GIT_SSH_COMMAND="command ssh -o BatchMode=yes" \
|
||||
command git fetch --all 2>/dev/null &>! "$gitdir/FETCH_LOG"
|
||||
command git fetch --all 2>/dev/null &>> "$gitdir/FETCH_LOG"
|
||||
) &|
|
||||
}
|
||||
|
||||
|
||||
@@ -23,9 +23,9 @@
|
||||
#Alias
|
||||
alias gfl='git flow'
|
||||
alias gfli='git flow init'
|
||||
alias gcd='git checkout develop'
|
||||
alias gch='git checkout hotfix'
|
||||
alias gcr='git checkout release'
|
||||
alias gcd='git checkout $(git config gitflow.branch.develop)'
|
||||
alias gch='git checkout $(git config gitflow.prefix.hotfix)'
|
||||
alias gcr='git checkout $(git config gitflow.prefix.release)'
|
||||
alias gflf='git flow feature'
|
||||
alias gflh='git flow hotfix'
|
||||
alias gflr='git flow release'
|
||||
|
||||
@@ -23,7 +23,7 @@ plugins=(... git)
|
||||
| gb | git branch |
|
||||
| gba | git branch -a |
|
||||
| gbd | git branch -d |
|
||||
| gbda | git branch --no-color --merged \| command grep -vE "^(\+\|\*\|\s*($(git_main_branch)\|development\|develop\|devel\|dev)\s*$)" \| command xargs -n 1 git branch -d |
|
||||
| gbda | git branch --no-color --merged \| grep -vE "^([+*]\|\s*($(git_main_branch)\|$(git_develop_branch))\s*$)" \| xargs git branch -d 2>/dev/null |
|
||||
| gbD | git branch -D |
|
||||
| gbl | git blame -b -w |
|
||||
| gbnm | git branch --no-merged |
|
||||
@@ -49,10 +49,11 @@ plugins=(... git)
|
||||
| gcl | git clone --recurse-submodules |
|
||||
| gclean | git clean -id |
|
||||
| gpristine | git reset --hard && git clean -dffx |
|
||||
| gcm | git checkout $(git_main_branch) |
|
||||
| gcd | git checkout develop |
|
||||
| gcm | git checkout $(git_main_branch) |
|
||||
| gcd | git checkout $(git_develop_branch) |
|
||||
| gcmsg | git commit -m |
|
||||
| gco | git checkout |
|
||||
| gcor | git checkout --recurse-submodules |
|
||||
| gcount | git shortlog -sn |
|
||||
| gcp | git cherry-pick |
|
||||
| gcpa | git cherry-pick --abort |
|
||||
@@ -65,6 +66,7 @@ plugins=(... git)
|
||||
| gds | git diff --staged |
|
||||
| gdt | git diff-tree --no-commit-id --name-only -r |
|
||||
| gdnolock | git diff $@ ":(exclude)package-lock.json" ":(exclude)*.lock" |
|
||||
| gdu | git diff @{u} |
|
||||
| gdv | git diff -w $@ \| view - |
|
||||
| gdw | git diff --word-diff |
|
||||
| gf | git fetch |
|
||||
@@ -87,7 +89,7 @@ plugins=(... git)
|
||||
| ghh | git help |
|
||||
| gignore | git update-index --assume-unchanged |
|
||||
| gignored | git ls-files -v \| grep "^[[:lower:]]" |
|
||||
| git-svn-dcommit-push | git svn dcommit && git push github $(git_main_branch):svntrunk |
|
||||
| git-svn-dcommit-push | git svn dcommit && git push github $(git_main_branch):svntrunk |
|
||||
| gk | gitk --all --branches |
|
||||
| gke | gitk --all $(git log -g --pretty=%h) |
|
||||
| gl | git pull |
|
||||
@@ -97,25 +99,26 @@ plugins=(... git)
|
||||
| glgga | git log --graph --decorate --all |
|
||||
| glgm | git log --graph --max-count=10 |
|
||||
| glo | git log --oneline --decorate |
|
||||
| glol | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' |
|
||||
| glols | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --stat |
|
||||
| glol | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ar) %C(bold blue)<%an>%Creset' |
|
||||
| glols | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ar) %C(bold blue)<%an>%Creset' --stat |
|
||||
| glod | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ad) %C(bold blue)<%an>%Creset' |
|
||||
| glods | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ad) %C(bold blue)<%an>%Creset' --date=short |
|
||||
| glola | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --all |
|
||||
| glola | git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ar) %C(bold blue)<%an>%Creset' --all |
|
||||
| glog | git log --oneline --decorate --graph |
|
||||
| gloga | git log --oneline --decorate --graph --all |
|
||||
| glp | git log --pretty=\<format\> |
|
||||
| gm | git merge |
|
||||
| gmom | git merge origin/$(git_main_branch) |
|
||||
| gmt | git mergetool --no-prompt |
|
||||
| gmtvim | git mergetool --no-prompt --tool=vimdiff |
|
||||
| gmum | git merge upstream/$(git_main_branch) |
|
||||
| gmom | git merge origin/$(git_main_branch) |
|
||||
| gmtl | git mergetool --no-prompt |
|
||||
| gmtlvim | git mergetool --no-prompt --tool=vimdiff |
|
||||
| gmum | git merge upstream/$(git_main_branch) |
|
||||
| gma | git merge --abort |
|
||||
| gp | git push |
|
||||
| gpd | git push --dry-run |
|
||||
| gpf | git push --force-with-lease |
|
||||
| gpf! | git push --force |
|
||||
| gpoat | git push origin --all && git push origin --tags |
|
||||
| gpr | git pull --rebase |
|
||||
| gpu | git push upstream |
|
||||
| gpv | git push -v |
|
||||
| gr | git remote |
|
||||
@@ -123,10 +126,10 @@ plugins=(... git)
|
||||
| grb | git rebase |
|
||||
| grba | git rebase --abort |
|
||||
| grbc | git rebase --continue |
|
||||
| grbd | git rebase develop |
|
||||
| grbd | git rebase $(git_develop_branch) |
|
||||
| grbi | git rebase -i |
|
||||
| grbm | git rebase $(git_main_branch) |
|
||||
| grbo | git rebase --onto |
|
||||
| grbm | git rebase $(git_main_branch) |
|
||||
| grbo | git rebase --onto |
|
||||
| grbs | git rebase --skip |
|
||||
| grev | git revert |
|
||||
| grh | git reset |
|
||||
@@ -174,7 +177,7 @@ plugins=(... git)
|
||||
| gupv | git pull --rebase -v |
|
||||
| gupa | git pull --rebase --autostash |
|
||||
| gupav | git pull --rebase --autostash -v |
|
||||
| glum | git pull upstream $(git_main_branch) |
|
||||
| glum | git pull upstream $(git_main_branch) |
|
||||
| gwch | git whatchanged -p --abbrev-commit --pretty=medium |
|
||||
| gwip | git add -A; git rm $(git ls-files --deleted) 2> /dev/null; git commit --no-verify --no-gpg-sign -m "--wip-- [skip ci]" |
|
||||
| gam | git am |
|
||||
@@ -212,13 +215,14 @@ These are aliases that have been removed, renamed, or otherwise modified in a wa
|
||||
|
||||
### Current
|
||||
|
||||
| Command | Description |
|
||||
|:-----------------------|:-----------------------------------------------------------------------------|
|
||||
| `grename <old> <new>` | Rename `old` branch to `new`, including in origin remote |
|
||||
| current_branch | Return the name of the current branch |
|
||||
| git_current_user_name | Returns the `user.name` config value |
|
||||
| git_current_user_email | Returns the `user.email` config value |
|
||||
| git_main_branch | Returns the name of the main branch: `main` if it exists, `master` otherwise |
|
||||
| Command | Description |
|
||||
|:-----------------------|:---------------------------------------------------------------------------------------------------------|
|
||||
| `grename <old> <new>` | Rename `old` branch to `new`, including in origin remote |
|
||||
| current_branch | Return the name of the current branch |
|
||||
| git_current_user_name | Returns the `user.name` config value |
|
||||
| git_current_user_email | Returns the `user.email` config value |
|
||||
| git_main_branch | Returns the name of the main branch: `main` if it exists, `master` otherwise |
|
||||
| git_develop_branch | Returns the name of the develop branch: `dev`, `devel`, `development` if they exist, `develop` otherwise |
|
||||
|
||||
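Because `git_main_branch` and `git_develop_branch` are plain shell functions, they can also be reused in personal aliases; a small sketch (these alias names are made up, not part of the plugin):

```zsh
# hypothetical additions to ~/.zshrc
alias gswm='git switch $(git_main_branch)'      # jump to main/trunk/master
alias gswd='git switch $(git_develop_branch)'   # jump to dev/devel/development/develop
```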
### Work in Progress (WIP)
|
||||
|
||||
|
||||
@@ -31,15 +31,28 @@ function work_in_progress() {
|
||||
|
||||
# Check if main exists and use instead of master
|
||||
function git_main_branch() {
|
||||
command git rev-parse --git-dir &>/dev/null || return
|
||||
local ref
|
||||
for ref in refs/{heads,remotes/{origin,upstream}}/{main,trunk}; do
|
||||
if command git show-ref -q --verify $ref; then
|
||||
echo ${ref:t}
|
||||
return
|
||||
fi
|
||||
done
|
||||
echo master
|
||||
}
|
||||
|
||||
# Check for develop and similarly named branches
|
||||
function git_develop_branch() {
|
||||
command git rev-parse --git-dir &>/dev/null || return
|
||||
local branch
|
||||
for branch in main trunk; do
|
||||
for branch in dev devel development; do
|
||||
if command git show-ref -q --verify refs/heads/$branch; then
|
||||
echo $branch
|
||||
return
|
||||
fi
|
||||
done
|
||||
echo master
|
||||
echo develop
|
||||
}
|
||||
|
||||
#
|
||||
@@ -60,7 +73,7 @@ alias gapt='git apply --3way'
|
||||
alias gb='git branch'
|
||||
alias gba='git branch -a'
|
||||
alias gbd='git branch -d'
|
||||
alias gbda='git branch --no-color --merged | command grep -vE "^(\+|\*|\s*($(git_main_branch)|development|develop|devel|dev)\s*$)" | command xargs -n 1 git branch -d'
|
||||
alias gbda='git branch --no-color --merged | command grep -vE "^([+*]|\s*($(git_main_branch)|$(git_develop_branch))\s*$)" | command xargs git branch -d 2>/dev/null'
|
||||
alias gbD='git branch -D'
|
||||
alias gbl='git blame -b -w'
|
||||
alias gbnm='git branch --no-merged'
|
||||
@@ -88,14 +101,17 @@ alias gcl='git clone --recurse-submodules'
|
||||
alias gclean='git clean -id'
|
||||
alias gpristine='git reset --hard && git clean -dffx'
|
||||
alias gcm='git checkout $(git_main_branch)'
|
||||
alias gcd='git checkout develop'
|
||||
alias gcd='git checkout $(git_develop_branch)'
|
||||
alias gcmsg='git commit -m'
|
||||
alias gco='git checkout'
|
||||
alias gcor='git checkout --recurse-submodules'
|
||||
alias gcount='git shortlog -sn'
|
||||
alias gcp='git cherry-pick'
|
||||
alias gcpa='git cherry-pick --abort'
|
||||
alias gcpc='git cherry-pick --continue'
|
||||
alias gcs='git commit -S'
|
||||
alias gcss='git commit -S -s'
|
||||
alias gcssm='git commit -S -s -m'
|
||||
|
||||
alias gd='git diff'
|
||||
alias gdca='git diff --cached'
|
||||
@@ -103,6 +119,7 @@ alias gdcw='git diff --cached --word-diff'
|
||||
alias gdct='git describe --tags $(git rev-list --tags --max-count=1)'
|
||||
alias gds='git diff --staged'
|
||||
alias gdt='git diff-tree --no-commit-id --name-only -r'
|
||||
alias gdu='git diff @{u}'
|
||||
alias gdw='git diff --word-diff'
|
||||
|
||||
function gdnolock() {
|
||||
@@ -194,19 +211,19 @@ alias glgg='git log --graph'
|
||||
alias glgga='git log --graph --decorate --all'
|
||||
alias glgm='git log --graph --max-count=10'
|
||||
alias glo='git log --oneline --decorate'
|
||||
alias glol="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset'"
|
||||
alias glols="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --stat"
|
||||
alias glol="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ar) %C(bold blue)<%an>%Creset'"
|
||||
alias glols="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ar) %C(bold blue)<%an>%Creset' --stat"
|
||||
alias glod="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ad) %C(bold blue)<%an>%Creset'"
|
||||
alias glods="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ad) %C(bold blue)<%an>%Creset' --date=short"
|
||||
alias glola="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --all"
|
||||
alias glola="git log --graph --pretty='%Cred%h%Creset -%C(auto)%d%Creset %s %Cgreen(%ar) %C(bold blue)<%an>%Creset' --all"
|
||||
alias glog='git log --oneline --decorate --graph'
|
||||
alias gloga='git log --oneline --decorate --graph --all'
|
||||
alias glp="_git_log_prettily"
|
||||
|
||||
alias gm='git merge'
|
||||
alias gmom='git merge origin/$(git_main_branch)'
|
||||
alias gmt='git mergetool --no-prompt'
|
||||
alias gmtvim='git mergetool --no-prompt --tool=vimdiff'
|
||||
alias gmtl='git mergetool --no-prompt'
|
||||
alias gmtlvim='git mergetool --no-prompt --tool=vimdiff'
|
||||
alias gmum='git merge upstream/$(git_main_branch)'
|
||||
alias gma='git merge --abort'
|
||||
|
||||
@@ -215,6 +232,7 @@ alias gpd='git push --dry-run'
|
||||
alias gpf='git push --force-with-lease'
|
||||
alias gpf!='git push --force'
|
||||
alias gpoat='git push origin --all && git push origin --tags'
|
||||
alias gpr='git pull --rebase'
|
||||
alias gpu='git push upstream'
|
||||
alias gpv='git push -v'
|
||||
|
||||
@@ -223,7 +241,7 @@ alias gra='git remote add'
|
||||
alias grb='git rebase'
|
||||
alias grba='git rebase --abort'
|
||||
alias grbc='git rebase --continue'
|
||||
alias grbd='git rebase develop'
|
||||
alias grbd='git rebase $(git_develop_branch)'
|
||||
alias grbi='git rebase -i'
|
||||
alias grbm='git rebase $(git_main_branch)'
|
||||
alias grbo='git rebase --onto'
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
This plugin supports working with GitHub from the command line. It provides a few things:
|
||||
|
||||
* Sets up the `hub` wrapper and completions for the `git` command if you have `hub` installed.
|
||||
* Completion for the `github` Ruby gem.
|
||||
* Sets up the `hub` wrapper and completions for the `git` command if you have [`hub`](https://github.com/github/hub) installed.
|
||||
* Completion for the [`github` Ruby gem](https://github.com/defunkt/github-gem).
|
||||
* Convenience functions for working with repos and URLs.
|
||||
|
||||
### Functions
|
||||
|
||||
@@ -36,7 +36,7 @@ __gnu_utils() {
|
||||
gcmds+=('gfind' 'gxargs' 'glocate')
|
||||
|
||||
# Not part of either coreutils or findutils, installed separately.
|
||||
gcmds+=('gsed' 'gtar' 'gtime' 'gmake')
|
||||
gcmds+=('gsed' 'gtar' 'gtime' 'gmake' 'ggrep')
|
||||
|
||||
for gcmd in "${gcmds[@]}"; do
|
||||
# Do nothing if the command isn't found
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
The go plugin is deprecated. Use the [golang plugin](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/golang) instead.
|
||||
@@ -1,3 +0,0 @@
|
||||
print -P "%F{yellow}The 'go' plugin is deprecated. Use the '%Ugolang%u' plugin instead.%f"
|
||||
|
||||
source "$ZSH/plugins/golang/golang.plugin.zsh"
|
||||
@@ -19,6 +19,7 @@ plugins=(... golang)
|
||||
| gof | `go fmt` | Gofmt formats (aligns and indents) Go programs. |
|
||||
| gofa | `go fmt ./...` | Run go fmt for all packages in current directory, recursively |
|
||||
| gog | `go get` | Downloads packages and then installs them to $GOPATH |
|
||||
| gog | `go get ./...` | Installs all dependencies in current directory, recursively |
|
||||
| goi | `go install` | Compiles and installs packages to $GOPATH |
|
||||
| gol | `go list` | Lists Go packages |
|
||||
| gom | `go mod` | Access to operations on modules |
|
||||
@@ -27,4 +28,5 @@ plugins=(... golang)
|
||||
| gops | `cd $GOPATH/src` | Takes you to $GOPATH/src |
|
||||
| gor | `go run` | Compiles and runs your code |
|
||||
| got | `go test` | Runs tests |
|
||||
| gota | `go test ./...` | Runs tests in all subdirectories |
|
||||
| gov | `go vet` | Vet examines Go source code and reports suspicious constructs |
|
||||
|
||||
@@ -263,6 +263,7 @@ alias god='go doc'
|
||||
alias gof='go fmt'
|
||||
alias gofa='go fmt ./...'
|
||||
alias gog='go get'
|
||||
alias goga='go get ./...'
|
||||
alias goi='go install'
|
||||
alias gol='go list'
|
||||
alias gom='go mod'
|
||||
@@ -271,4 +272,5 @@ alias gopb='cd $GOPATH/bin'
|
||||
alias gops='cd $GOPATH/src'
|
||||
alias gor='go run'
|
||||
alias got='go test'
|
||||
alias gota='go test ./...'
|
||||
alias gov='go vet'
|
||||
|
||||
10
zsh/plugins/invoke/README.md
Normal file
@@ -0,0 +1,10 @@
# Invoke plugin

This plugin adds completion for [invoke](https://github.com/pyinvoke/invoke).

To use it, add `invoke` to the plugins array in your `~/.zshrc` file:

```zsh
plugins=(... invoke)
```
|
||||
|
||||
5
zsh/plugins/invoke/invoke.plugin.zsh
Normal file
@@ -0,0 +1,5 @@
# Autocompletion for invoke.
#
if [ $commands[invoke] ]; then
  source <(invoke --print-completion-script=zsh)
fi
|
||||
22
zsh/plugins/isodate/README.md
Normal file
@@ -0,0 +1,22 @@
# Isodate plugin

**Maintainer:** [@Frani](https://github.com/frani)

This plugin adds aliases for displaying dates in the [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format,
as well as some aliases for common `date` commands.

To use it, add `isodate` to the plugins array in your zshrc file:

```zsh
plugins=(... isodate)
```

## Aliases

| Alias         | Command                       | Description                                                                  |
|---------------|-------------------------------|------------------------------------------------------------------------------|
| isodate       | `date +%Y-%m-%dT%H:%M:%S%z`   | Display the current date with UTC offset and ISO 8601-2 extended format     |
| isodate_utc   | `date -u +%Y-%m-%dT%H:%M:%SZ` | Display the current date in UTC and ISO 8601-2 extended format              |
| isodate_basic | `date -u +%Y%m%dT%H%M%SZ`     | Display the current date in UTC and ISO 8601 basic format                   |
| unixstamp     | `date +%s`                    | Display the current date as a Unix timestamp (seconds since the Unix epoch) |
| date_locale   | `date +"%c"`                  | Display the current date using the default locale's format                  |
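For a quick feel of what these aliases print, an illustrative interactive session (the timestamps below are made up, not output from any particular machine):

```zsh
$ isodate
2021-09-20T18:30:05+0300
$ isodate_utc
2021-09-20T15:30:05Z
$ isodate_basic
20210920T153005Z
$ unixstamp
1632151805
```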
7
zsh/plugins/isodate/isodate.plugin.zsh
Normal file
@@ -0,0 +1,7 @@
# Work with ISO 8601 dates easily

alias isodate="date +%Y-%m-%dT%H:%M:%S%z"
alias isodate_utc="date -u +%Y-%m-%dT%H:%M:%SZ"
alias isodate_basic="date -u +%Y%m%dT%H%M%SZ"
alias unixstamp="date +%s"
alias date_locale='date +"%c"'
|
||||
@@ -107,7 +107,6 @@ plugins=(... kubectl)
|
||||
| ksss | `kubectl scale statefulset` | Scale a statefulset |
|
||||
| krsss | `kubectl rollout status statefulset`| Check the rollout status of a deployment |
|
||||
| | | **Service Accounts management** |
|
||||
| kgsa | `kubectl get sa` | List all service accounts |
|
||||
| kdsa | `kubectl describe sa` | Describe a service account in details |
|
||||
| kdelsa | `kubectl delete sa` | Delete the service account |
|
||||
| | | **DaemonSet management** |
|
||||
|
||||
@@ -43,6 +43,7 @@ alias kgpwide='kgp -o wide'
|
||||
alias kep='kubectl edit pods'
|
||||
alias kdp='kubectl describe pods'
|
||||
alias kdelp='kubectl delete pods'
|
||||
alias kgpall='kubectl get pods --all-namespaces -o wide'
|
||||
|
||||
# get pod by label: kgpl "app=myapp" -n myns
|
||||
alias kgpl='kgp -l'
|
||||
@@ -151,7 +152,6 @@ alias kdpvc='kubectl describe pvc'
|
||||
alias kdelpvc='kubectl delete pvc'
|
||||
|
||||
# Service account management.
|
||||
alias kgsa="kubectl get sa"
|
||||
alias kdsa="kubectl describe sa"
|
||||
alias kdelsa="kubectl delete sa"
|
||||
|
||||
|
||||
26
zsh/plugins/kubectx/README.md
Normal file
@@ -0,0 +1,26 @@
|
||||
# kubectx - show active kubectl context
|
||||
|
||||
This plugin adds the `kubectx_prompt_info()` function. It shows the name of the
active kubectl context (`kubectl config current-context`).
|
||||
|
||||
You can use it to customize your prompt and know whether you are on the prod cluster ;)
|
||||
|
||||
_Example_. Add to **.zshrc**:
|
||||
|
||||
```
|
||||
RPS1='$(kubectx_prompt_info)'
|
||||
```
|
||||
|
||||
### custom ctx names
|
||||
|
||||
You can rename the default context names for better readability.
|
||||
|
||||
_Example_. Add to **.zshrc**:
|
||||
```
|
||||
kubectx_mapping[minikube]="mini"
|
||||
kubectx_mapping[context_name_from_kubeconfig]="$emoji[wolf_face]"
|
||||
kubectx_mapping[production_cluster]="%{$fg[yellow]%}prod!%{$reset_color%}"
|
||||
```
|
||||
|
||||

|
||||

|
||||
9
zsh/plugins/kubectx/kubectx.plugin.zsh
Normal file
@@ -0,0 +1,9 @@
typeset -A kubectx_mapping

function kubectx_prompt_info() {
  if [ $commands[kubectl] ]; then
    local current_ctx=`kubectl config current-context`
    # use value in associative array if it exists, otherwise fall back to the context name
    echo "${kubectx_mapping[$current_ctx]:-$current_ctx}"
  fi
}
|
||||
BIN
zsh/plugins/kubectx/prod.png
Normal file
Binary file not shown.
BIN
zsh/plugins/kubectx/stage.png
Normal file
Binary file not shown.
@@ -8,6 +8,7 @@ function artisan \
|
||||
drush \
|
||||
gulp \
|
||||
npm \
|
||||
php \
|
||||
wp \
|
||||
yarn {
|
||||
if checkForLandoFile; then
|
||||
|
||||
@@ -19,37 +19,38 @@ alias hglr='hg pull --rebase'
|
||||
alias hgo='hg outgoing'
|
||||
|
||||
function in_hg() {
|
||||
if [[ -d .hg ]] || $(hg summary > /dev/null 2>&1); then
|
||||
if $(hg branch > /dev/null 2>&1); then
|
||||
echo 1
|
||||
fi
|
||||
}
|
||||
|
||||
function hg_get_branch_name() {
|
||||
if [ $(in_hg) ]; then
|
||||
echo $(hg branch)
|
||||
branch=`hg branch 2>/dev/null`
|
||||
if [ $? -eq 0 ]; then
|
||||
echo $branch
|
||||
fi
|
||||
unset branch
|
||||
}
|
||||
|
||||
function hg_prompt_info {
|
||||
if [ $(in_hg) ]; then
|
||||
_DISPLAY=$(hg_get_branch_name)
|
||||
_DISPLAY=`hg branch 2>/dev/null`
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_HG_PROMPT_PREFIX\
|
||||
$ZSH_THEME_REPO_NAME_COLOR$_DISPLAY$ZSH_PROMPT_BASE_COLOR$ZSH_PROMPT_BASE_COLOR$(hg_dirty)$ZSH_THEME_HG_PROMPT_SUFFIX$ZSH_PROMPT_BASE_COLOR"
|
||||
unset _DISPLAY
|
||||
fi
|
||||
unset _DISPLAY
|
||||
}
|
||||
|
||||
function hg_dirty_choose {
|
||||
if [ $(in_hg) ]; then
|
||||
hg status 2> /dev/null | command grep -Eq '^\s*[ACDIM!?L]'
|
||||
hg status -mar 2> /dev/null | command grep -Eq '^\s*[ACDIM!?L]'
|
||||
if [ $? -eq 0 ]; then
|
||||
if [ $pipestatus[-1] -eq 0 ]; then
|
||||
# Grep exits with 0 when "One or more lines were selected", return "dirty".
|
||||
echo $1
|
||||
else
|
||||
# Otherwise, no lines were found, or an error occurred. Return clean.
|
||||
echo $2
|
||||
return
|
||||
fi
|
||||
fi
|
||||
echo $2
|
||||
}
|
||||
|
||||
function hg_dirty {
|
||||
@@ -57,9 +58,15 @@ function hg_dirty {
|
||||
}
|
||||
|
||||
function hgic() {
|
||||
hg incoming "$@" | grep "changeset" | wc -l
|
||||
hg incoming "$@" | grep "changeset" | wc -l
|
||||
}
|
||||
|
||||
function hgoc() {
|
||||
hg outgoing "$@" | grep "changeset" | wc -l
|
||||
hg outgoing "$@" | grep "changeset" | wc -l
|
||||
}
|
||||
|
||||
function hg_get_bookmark_name() {
|
||||
if [ $(in_hg) ]; then
|
||||
echo $(hg id -B)
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -21,7 +21,11 @@ These settings should go in your zshrc file, before Oh My Zsh is sourced:
|
||||
|
||||
- **`NVM_LAZY`**: if you want the plugin to defer the load of nvm to speed-up the start of your zsh session,
|
||||
set `NVM_LAZY` to `1`. This will use the `--no-use` parameter when loading nvm, and will create a function
|
||||
for `node`, `npm` and `yarn`, so when you call either of these three, nvm will load with `nvm use default`.
|
||||
for `node`, `npm`, `yarn`, and the command(s) specified by `NVM_LAZY_CMD`, so when you call any of them,
|
||||
nvm will load with `nvm use default`.
|
||||
|
||||
- **`NVM_LAZY_CMD`**: if you want additional command(s) to trigger lazy loading of nvm, set `NVM_LAZY_CMD` to
|
||||
the command or an array of the commands.
|
||||
|
||||
- **`NVM_AUTOLOAD`**: if `NVM_AUTOLOAD` is set to `1`, the plugin will automatically load a node version when
|
||||
it finds a [`.nvmrc` file](https://github.com/nvm-sh/nvm#nvmrc) in the current working directory indicating
|
||||
|
||||
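Putting these settings together, a minimal sketch of a lazy-loading setup in `~/.zshrc`, placed before Oh My Zsh is sourced (the extra commands in `NVM_LAZY_CMD` are arbitrary examples):

```zsh
# ~/.zshrc
NVM_LAZY=1              # defer loading nvm until first use
NVM_LAZY_CMD=(nvim vim) # example: also trigger the lazy load from these commands
NVM_AUTOLOAD=1          # switch versions automatically when a .nvmrc is found

plugins=(... nvm)
source $ZSH/oh-my-zsh.sh
```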
@@ -28,8 +28,8 @@ fi
|
||||
|
||||
# Call nvm when first using node, npm or yarn
|
||||
if (( $+NVM_LAZY )); then
|
||||
function node npm yarn {
|
||||
unfunction node npm yarn
|
||||
function node npm yarn $NVM_LAZY_CMD {
|
||||
unfunction node npm yarn $NVM_LAZY_CMD
|
||||
nvm use default
|
||||
command "$0" "$@"
|
||||
}
|
||||
|
||||
12
zsh/plugins/octozen/README.md
Normal file
@@ -0,0 +1,12 @@
|
||||
# Octozen plugin
|
||||
|
||||
Displays a zen quote from GitHub's Octocat on startup.
|
||||
|
||||
To use it, add `octozen` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... octozen)
|
||||
```
|
||||
|
||||
It defines a `display_octozen` function that fetches a GitHub Octocat zen quote.
|
||||
NOTE: an Internet connection is required (the request times out if the quote is not fetched within 2 seconds).
|
||||
11
zsh/plugins/octozen/octozen.plugin.zsh
Normal file
@@ -0,0 +1,11 @@
# octozen plugin

# Displays a zen quote from octocat
function display_octozen() {
  curl -m 2 -fsL "https://api.github.com/octocat"
  add-zsh-hook -d precmd display_octozen
}

# Display the octocat on the first precmd, after the whole starting process has finished
autoload -Uz add-zsh-hook
add-zsh-hook precmd display_octozen
|
||||
@@ -17,3 +17,12 @@ or you can run `zsh-pip-cache-packages` directly.
|
||||
|
||||
To reset the cache, run `zsh-pip-clear-cache` and it will be rebuilt
the next time you autocomplete `pip install`.
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Description |
|
||||
| :------- | :-------------------------------------------- |
|
||||
| pipreq | Create requirements file |
|
||||
| pipir | Install packages from `requirements.txt` file |
|
||||
| pipupall | Update all installed packages |
|
||||
| pipunall | Uninstall all installed packages |
|
||||
|
||||
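A usage sketch of the new aliases (assumes an activated virtualenv; `requests` is just an example package):

```zsh
pip install requests   # install something into the current environment
pipreq                 # create requirements.txt from the current environment
pipir                  # later: install packages from an existing requirements.txt
pipupall               # or: update all installed packages
pipunall               # or: uninstall all installed packages
```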
@@ -84,3 +84,14 @@ zsh-pip-test-clean-packages() {
|
||||
|
||||
alias pip="noglob pip" # allows square brackets for pip command invocation
|
||||
|
||||
# Create requirements file
|
||||
alias pipreq="pip freeze > requirements.txt"
|
||||
|
||||
# Update all installed packages
|
||||
alias pipupall="pipreq && sed -i 's/==/>=/g' requirements.txt && pip install -r requirements.txt --upgrade && rm -rf requirements.txt"
|
||||
|
||||
# Install packages from requirements file
|
||||
alias pipir="pip install -r requirements.txt"
|
||||
|
||||
# Uninstall all installed packages
|
||||
alias pipunall="pipreq && pip uninstall -r requirements.txt -y && rm -rf requirements.txt"
|
||||
|
||||
19
zsh/plugins/pm2/README.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# pm2 plugin
|
||||
|
||||
The plugin adds several aliases and completions for common [pm2](http://pm2.keymetrics.io/) commands.
|
||||
|
||||
To use it, add `pm2` to the plugins array of your zshrc file:
|
||||
```
|
||||
plugins=(... pm2)
|
||||
```
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Command |
|
||||
|--------|----------------------|
|
||||
| p2s | `pm2 start` |
|
||||
| p2o | `pm2 stop` |
|
||||
| p2d | `pm2 delete` |
|
||||
| p2r | `pm2 restart` |
|
||||
| p2i | `pm2 list` |
|
||||
| p2l | `pm2 logs` |
|
||||
168
zsh/plugins/pm2/_pm2
Normal file
@@ -0,0 +1,168 @@
|
||||
#!/bin/zsh -f
|
||||
#compdef pm2
|
||||
#autoload
|
||||
|
||||
local -a _1st_arguments
|
||||
|
||||
_1st_arguments=(
|
||||
"start:start and daemonize an app"
|
||||
"trigger:trigger process action"
|
||||
"deploy:deploy your json"
|
||||
"startOrRestart:start or restart JSON file"
|
||||
"startOrReload:start or gracefully reload JSON file"
|
||||
"pid:return pid of [app_name] or all"
|
||||
"stop:stop a process"
|
||||
"restart:restart a process"
|
||||
"scale:scale up/down a process in cluster mode depending on total_number param"
|
||||
"profile\:mem:Sample PM2 heap memory"
|
||||
"profile\:cpu:Profile PM2 cpu"
|
||||
"reload:reload processes (note that its for app using HTTP/HTTPS)"
|
||||
"id:get process id by name"
|
||||
"inspect:inspect a process"
|
||||
"delete:stop and delete a process from pm2 process list"
|
||||
"sendSignal:send a system signal to the target process"
|
||||
"ping:ping pm2 daemon - if not up it will launch it"
|
||||
"updatePM2:update in-memory PM2 with local PM2"
|
||||
"install:install or update a module and run it forever"
|
||||
"module\:update:update a module and run it forever"
|
||||
"module\:generate:Generate a sample module in current folder"
|
||||
"uninstall:stop and uninstall a module"
|
||||
"package:Check & Package TAR type module"
|
||||
"publish:Publish the module you are currently on"
|
||||
"set:sets the specified config <key> <value>"
|
||||
"multiset:multiset eg \"key1 val1 key2 val2\""
|
||||
"get:get value for <key>"
|
||||
"config:get / set module config values"
|
||||
"unset:clears the specified config <key>"
|
||||
"report:give a full pm2 report for https\://github.com/Unitech/pm2/issues"
|
||||
"link:link with the pm2 monitoring dashboard"
|
||||
"unlink:unlink with the pm2 monitoring dashboard"
|
||||
"monitor:monitor target process"
|
||||
"unmonitor:unmonitor target process"
|
||||
"open:open the pm2 monitoring dashboard"
|
||||
"plus:enable pm2 plus"
|
||||
"login:Login to pm2 plus"
|
||||
"logout:Logout from pm2 plus"
|
||||
"web:launch a health API on 0.0.0.0\:9615"
|
||||
"dump:dump all processes for resurrecting them later"
|
||||
"cleardump:Create empty dump file"
|
||||
"send:send stdin to <pm_id>"
|
||||
"attach:attach stdin/stdout to application identified by <pm_id>"
|
||||
"resurrect:resurrect previously dumped processes"
|
||||
"unstartup:disable the pm2 startup hook"
|
||||
"startup:enable the pm2 startup hook"
|
||||
"logrotate:copy default logrotate configuration"
|
||||
"ecosystem:generate a process conf file. (mode = null or simple)"
|
||||
"reset:reset counters for process"
|
||||
"describe:describe all parameters of a process id"
|
||||
"list:list all processes"
|
||||
"jlist:list all processes in JSON format"
|
||||
"prettylist:print json in a prettified JSON"
|
||||
"monit:launch termcaps monitoring"
|
||||
"imonit:launch legacy termcaps monitoring"
|
||||
"dashboard:launch dashboard with monitoring and logs"
|
||||
"flush:flush logs"
|
||||
"reloadLogs:reload all logs"
|
||||
"logs:stream logs file. Default stream all logs"
|
||||
"kill:kill daemon"
|
||||
"pull:updates repository for a given app"
|
||||
"forward:updates repository to the next commit for a given app"
|
||||
"backward:downgrades repository to the previous commit for a given app"
|
||||
"deepUpdate:performs a deep update of PM2"
|
||||
"serve:serve a directory over http via port"
|
||||
"examples:display pm2 usage examples"
|
||||
)
|
||||
|
||||
local -a id_names
|
||||
|
||||
_id_names() {
|
||||
local app_list
|
||||
app_list=`pm2 list -m`
|
||||
|
||||
local -a names ids
|
||||
names=(`echo $app_list | grep '+---' | awk '{print $2}'`)
|
||||
ids=(`echo $app_list | grep 'pm2 id' | awk '{print $4}'`)
|
||||
|
||||
if (( ${#ids} > 0 )); then
|
||||
for i in {1..${#ids}}; do
|
||||
id_names+=( "${ids[i]}:${names[i]}" )
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
_arguments \
|
||||
'(-v --version)'{-v,--version}'[output version]' \
|
||||
'(-h --help)'{-h,--help}'[output usage information]' \
|
||||
'*:: :->subcmds' && return 0
|
||||
|
||||
if (( CURRENT == 1 )); then
|
||||
_describe "command" _1st_arguments
|
||||
return
|
||||
fi
|
||||
|
||||
local -a id_comp id_all_comp id_all_files_comp start_options logs_options
|
||||
id_comp=('1: :->id_comp')
|
||||
id_all_comp=('1: :->id_all_comp')
|
||||
id_all_files_comp=('1: :->id_all_files_comp')
|
||||
start_options=(
|
||||
'--watch[Watch folder for changes]'
|
||||
'--fresh[Rebuild Dockerfile]'
|
||||
'--daemon[Run container in Daemon mode (debug purposes)]'
|
||||
'--container[Start application in container mode]'
|
||||
'--dist[with --container; change local Dockerfile to containerize all files in current directory]'
|
||||
'--image-name[with --dist; set the exported image name]'
|
||||
'--node-version[with --container, set a specific major Node.js version]'
|
||||
'--dockerdaemon[for debugging purpose]'
|
||||
'(-h --help)'{-h,--help}'[output usage information]'
|
||||
$id_all_files_comp
|
||||
)
|
||||
logs_options=(
|
||||
'--json[json log output]'
|
||||
'--format[formatted log output]'
|
||||
'--raw[raw output]'
|
||||
'--err[only shows error output]'
|
||||
'--out[only shows standard output]'
|
||||
'--lines[output the last N lines, instead of the last 15 by default]'
|
||||
'--timestamp[add timestamps (default format YYYY-MM-DD-HH:mm:ss)]'
|
||||
'--nostream[print logs without launching the log stream]'
|
||||
'(-h --help)'{-h,--help}'[output usage information]'
|
||||
$id_all_comp
|
||||
)
|
||||
|
||||
case "$words[1]" in
|
||||
start)
|
||||
_arguments $start_options && return 0
|
||||
;;
|
||||
logs)
|
||||
_arguments $logs_options && return 0
|
||||
;;
|
||||
stop|restart|delete|reload|reset)
|
||||
_arguments $id_all_comp && return 0
|
||||
;;
|
||||
env|inspect|monitor|unmonitor|describe)
|
||||
_arguments $id_comp && return 0
|
||||
;;
|
||||
deploy|startOrRestart|startOrReload)
|
||||
_files ;;
|
||||
esac
|
||||
|
||||
case "$state" in
|
||||
id_comp)
|
||||
_id_names
|
||||
_alternative \
|
||||
'args:app args:(($id_names))'
|
||||
;;
|
||||
id_all_comp)
|
||||
_id_names
|
||||
id_names+=(all)
|
||||
_alternative \
|
||||
'args:app args:(($id_names))'
|
||||
;;
|
||||
id_all_files_comp)
|
||||
_id_names
|
||||
id_names+=(all)
|
||||
_alternative \
|
||||
'args:app args:(($id_names))' \
|
||||
'files:filename:_files'
|
||||
;;
|
||||
esac
|
||||
6
zsh/plugins/pm2/pm2.plugin.zsh
Normal file
@@ -0,0 +1,6 @@
alias p2s='pm2 start'
alias p2o='pm2 stop'
alias p2d='pm2 delete'
alias p2r='pm2 restart'
alias p2i='pm2 list'
alias p2l='pm2 logs'
|
||||
@@ -1,4 +1,4 @@
|
||||
# pyenv
|
||||
# pyenv
|
||||
|
||||
This plugin looks for [pyenv](https://github.com/pyenv/pyenv), a Simple Python version
|
||||
management system, and loads it if it's found. It also loads pyenv-virtualenv, a pyenv
|
||||
@@ -10,6 +10,14 @@ To use it, add `pyenv` to the plugins array in your zshrc file:
|
||||
plugins=(... pyenv)
|
||||
```
|
||||
|
||||
## Settings
|
||||
|
||||
- `ZSH_PYENV_QUIET`: if set to `true`, the plugin will not print any messages if it
|
||||
finds that `pyenv` is not properly configured.
|
||||
|
||||
- `ZSH_PYENV_VIRTUALENV`: if set to `false`, the plugin will not load pyenv-virtualenv
|
||||
when it finds it.
|
||||
|
||||
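A minimal sketch of these two settings in `~/.zshrc`, placed before Oh My Zsh is sourced (the values are illustrative):

```zsh
# ~/.zshrc
ZSH_PYENV_QUIET=true        # silence the "badly configured" warning
ZSH_PYENV_VIRTUALENV=false  # do not load pyenv-virtualenv even if it is installed

plugins=(... pyenv)
```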
## Functions
|
||||
|
||||
- `pyenv_prompt_info`: displays the Python version in use by pyenv; or the global Python
|
||||
|
||||
@@ -1,46 +1,96 @@
|
||||
pyenv_config_warning() {
|
||||
[[ "$ZSH_PYENV_QUIET" != true ]] || return 0
|
||||
|
||||
local reason="$1"
|
||||
local pyenv_root="${PYENV_ROOT/#$HOME/\$HOME}"
|
||||
cat >&2 <<EOF
|
||||
Found pyenv, but it is badly configured ($reason). pyenv might not
|
||||
work correctly for non-interactive shells (for example, when run from a script).
|
||||
${(%):-"%B%F{yellow}"}
|
||||
To fix this message, add these lines to the '.profile' and '.zprofile' files
|
||||
in your home directory:
|
||||
${(%):-"%f"}
|
||||
export PYENV_ROOT="$pyenv_root"
|
||||
export PATH="\$PYENV_ROOT/bin:\$PATH"
|
||||
eval "\$(pyenv init --path)"
|
||||
${(%):-"%F{yellow}"}
|
||||
You'll need to restart your user session for the changes to take effect.${(%):-%b%f}
|
||||
For more information go to https://github.com/pyenv/pyenv/#installation.
|
||||
EOF
|
||||
}
|
||||
|
||||
# This plugin loads pyenv into the current shell and provides prompt info via
|
||||
# the 'pyenv_prompt_info' function. Also loads pyenv-virtualenv if available.
|
||||
|
||||
# Load pyenv only if command not already available
|
||||
if command -v pyenv &> /dev/null && [[ "$(uname -r)" != *icrosoft* ]]; then
|
||||
FOUND_PYENV=1
|
||||
# Look for pyenv in $PATH and verify that it's not a part of pyenv-win in WSL
|
||||
if ! command -v pyenv &>/dev/null; then
|
||||
FOUND_PYENV=0
|
||||
elif [[ "${commands[pyenv]}" = */pyenv-win/* && "$(uname -r)" = *icrosoft* ]]; then
|
||||
FOUND_PYENV=0
|
||||
else
|
||||
FOUND_PYENV=0
|
||||
FOUND_PYENV=1
|
||||
fi
|
||||
|
||||
# Look for pyenv and try to load it (will only work on interactive shells)
|
||||
if [[ $FOUND_PYENV -ne 1 ]]; then
|
||||
pyenvdirs=("$HOME/.pyenv" "/usr/local/pyenv" "/opt/pyenv" "/usr/local/opt/pyenv")
|
||||
for dir in $pyenvdirs; do
|
||||
if [[ -d $dir/bin ]]; then
|
||||
export PATH="$PATH:$dir/bin"
|
||||
FOUND_PYENV=1
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ $FOUND_PYENV -ne 1 ]]; then
|
||||
if (( $+commands[brew] )) && dir=$(brew --prefix pyenv 2>/dev/null); then
|
||||
if [[ -d $dir/bin ]]; then
|
||||
export PATH="$PATH:$dir/bin"
|
||||
FOUND_PYENV=1
|
||||
fi
|
||||
pyenvdirs=("$HOME/.pyenv" "/usr/local/pyenv" "/opt/pyenv" "/usr/local/opt/pyenv")
|
||||
for dir in $pyenvdirs; do
|
||||
if [[ -d "$dir/bin" ]]; then
|
||||
FOUND_PYENV=1
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $FOUND_PYENV -ne 1 ]]; then
|
||||
if (( $+commands[brew] )) && dir=$(brew --prefix pyenv 2>/dev/null); then
|
||||
if [[ -d "$dir/bin" ]]; then
|
||||
FOUND_PYENV=1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# If we found pyenv, load it but show a caveat about non-interactive shells
|
||||
if [[ $FOUND_PYENV -eq 1 ]]; then
|
||||
# Configuring in .zshrc only makes pyenv available for interactive shells
|
||||
export PYENV_ROOT="$dir"
|
||||
export PATH="$PYENV_ROOT/bin:$PATH"
|
||||
eval "$(pyenv init --path)"
|
||||
|
||||
# Show warning due to bad pyenv configuration
|
||||
pyenv_config_warning 'pyenv command not found in $PATH'
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $FOUND_PYENV -eq 1 ]]; then
|
||||
eval "$(pyenv init - --no-rehash zsh)"
|
||||
if (( $+commands[pyenv-virtualenv-init] )); then
|
||||
eval "$(pyenv virtualenv-init - zsh)"
|
||||
fi
|
||||
function pyenv_prompt_info() {
|
||||
echo "$(pyenv version-name)"
|
||||
}
|
||||
if [[ -z "$PYENV_ROOT" ]]; then
|
||||
# This is only for backwards compatibility with users that previously relied
|
||||
# on this plugin exporting it. pyenv itself does not require it to be exported
|
||||
export PYENV_ROOT="$(pyenv root)"
|
||||
fi
|
||||
|
||||
# Add pyenv shims to $PATH if not already added
|
||||
if [[ -z "${path[(Re)$(pyenv root)/shims]}" ]]; then
|
||||
eval "$(pyenv init --path)"
|
||||
pyenv_config_warning 'missing pyenv shims in $PATH'
|
||||
fi
|
||||
|
||||
# Load pyenv
|
||||
eval "$(pyenv init - --no-rehash zsh)"
|
||||
|
||||
# If pyenv-virtualenv exists, load it
|
||||
if [[ -d "$(pyenv root)/plugins/pyenv-virtualenv" && "$ZSH_PYENV_VIRTUALENV" != false ]]; then
|
||||
eval "$(pyenv virtualenv-init - zsh)"
|
||||
fi
|
||||
|
||||
function pyenv_prompt_info() {
|
||||
echo "$(pyenv version-name)"
|
||||
}
|
||||
else
|
||||
# fallback to system python
|
||||
function pyenv_prompt_info() {
|
||||
echo "system: $(python -V 2>&1 | cut -f 2 -d ' ')"
|
||||
}
|
||||
# Fall back to system python
|
||||
function pyenv_prompt_info() {
|
||||
echo "system: $(python -V 2>&1 | cut -f 2 -d ' ')"
|
||||
}
|
||||
fi
|
||||
|
||||
unset FOUND_PYENV pyenvdirs dir
|
||||
unfunction pyenv_config_warning
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
# python command
|
||||
alias py='python'
|
||||
|
||||
# Find python file
|
||||
alias pyfind='find . -name "*.py"'
|
||||
|
||||
|
||||
@@ -52,10 +52,14 @@ alias ru='rails runner'
|
||||
alias rs='rails server'
|
||||
alias rsd='rails server --debugger'
|
||||
alias rsp='rails server --port'
|
||||
alias rsb='rails server --bind'
|
||||
|
||||
# Rake aliases
|
||||
alias rdm='rake db:migrate'
|
||||
alias rdmr='rake db:migrate:redo'
|
||||
alias rdmd='rake db:migrate:down'
|
||||
alias rdms='rake db:migrate:status'
|
||||
alias rdmu='rake db:migrate:up'
|
||||
alias rdr='rake db:rollback'
|
||||
alias rdc='rake db:create'
|
||||
alias rds='rake db:seed'
|
||||
|
||||
File diff suppressed because it is too large
22
zsh/plugins/rustup/rustup.plugin.zsh
Normal file
@@ -0,0 +1,22 @@
|
||||
if (( $+commands[rustup] )); then
|
||||
# remove old generated completion file
|
||||
command rm -f "${0:A:h}/_rustup"
|
||||
|
||||
ver="$(rustup --version 2>/dev/null)"
|
||||
ver_file="$ZSH_CACHE_DIR/rustup_version"
|
||||
comp_file="$ZSH_CACHE_DIR/completions/_rustup"
|
||||
|
||||
mkdir -p "${comp_file:h}"
|
||||
(( ${fpath[(Ie)${comp_file:h}]} )) || fpath=("${comp_file:h}" $fpath)
|
||||
|
||||
if [[ ! -f "$comp_file" || ! -f "$ver_file" || "$ver" != "$(< "$ver_file")" ]]; then
|
||||
rustup completions zsh >| "$comp_file"
|
||||
echo "$ver" >| "$ver_file"
|
||||
fi
|
||||
|
||||
declare -A _comps
|
||||
autoload -Uz _rustup
|
||||
_comps[rustup]=_rustup
|
||||
|
||||
unset ver ver_file comp_file
|
||||
fi
|
||||
5
zsh/plugins/samtools/README.md
Normal file
@@ -0,0 +1,5 @@
# Samtools plugin

This plugin adds support for [samtools](http://www.htslib.org/):

* Adds autocomplete options for all samtools subcommands.
|
||||
40
zsh/plugins/samtools/_samtools
Normal file
@@ -0,0 +1,40 @@
|
||||
#compdef samtools
|
||||
#autoload
|
||||
|
||||
local curcontext="$curcontext" state line ret=1
|
||||
local -a _files
|
||||
|
||||
_arguments -C \
|
||||
'1: :->cmds' \
|
||||
'2:: :->args' && ret=0
|
||||
|
||||
case $state in
|
||||
cmds)
|
||||
_values "samtools command" \
|
||||
"view[SAM<->BAM conversion]" \
|
||||
"sort[sort alignment file]" \
|
||||
"mpileup[multi-way pileup]" \
|
||||
"depth[compute the depth]" \
|
||||
"faidx[index/extract FASTA]" \
|
||||
"tview[text alignment viewer]" \
|
||||
"index[index alignment]" \
|
||||
"idxstats[BAM index stats (r595 or later)]" \
|
||||
"fixmate[fix mate information]" \
|
||||
"flagstat[simple stats]" \
|
||||
"calmd[recalculate MD/NM tags and '=' bases]" \
|
||||
"merge[merge sorted alignments]" \
|
||||
"rmdup[remove PCR duplicates]" \
|
||||
"reheader[replace BAM header]" \
|
||||
"cat[concatenate BAMs]" \
|
||||
"bedcov[read depth per BED region]" \
|
||||
"targetcut[cut fosmid regions (for fosmid pool only)]" \
|
||||
"phase[phase heterozygotes]" \
|
||||
"bamshuf[shuffle and group alignments by name]"
|
||||
ret=0
|
||||
;;
|
||||
*)
|
||||
_files
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
@@ -27,6 +27,15 @@ To **load multiple identities** use the `identities` style, For example:
|
||||
zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github
|
||||
```
|
||||
|
||||
**NOTE:** the identities may be an absolute path if they are somewhere other than
|
||||
`~/.ssh`. For example:
|
||||
|
||||
```zsh
|
||||
zstyle :omz:plugins:ssh-agent identities ~/.config/ssh/id_rsa ~/.config/ssh/id_rsa2 ~/.config/ssh/id_github
|
||||
# which can be simplified to
|
||||
zstyle :omz:plugins:ssh-agent identities ~/.config/ssh/{id_rsa,id_rsa2,id_github}
|
||||
```
|
||||
|
||||
----
|
||||
|
||||
To **set the maximum lifetime of the identities**, use the `lifetime` style.
|
||||
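A minimal sketch of such a setting in `~/.zshrc`, before Oh My Zsh is sourced (`4h` is just an example; the value is passed to `ssh-agent -t`, which accepts the usual sshd-style time formats):

```zsh
zstyle :omz:plugins:ssh-agent lifetime 4h
```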
@@ -55,6 +64,15 @@ ssh-add -K -c -a /run/user/1000/ssh-auth <identities>
|
||||
|
||||
For valid `ssh-add` arguments run `ssh-add --help` or `man ssh-add`.
|
||||
|
||||
----
|
||||
|
||||
To set an **external helper** to ask for the passwords and possibly store
|
||||
them in the system keychain, use the `helper` style. For example:
|
||||
|
||||
```zsh
|
||||
zstyle :omz:plugins:ssh-agent helper ksshaskpass
|
||||
```
|
||||
|
||||
## Credits
|
||||
|
||||
Based on code from Joseph M. Reagle: https://www.cygwin.com/ml/cygwin/2001-06/msg00537.html
|
||||
|
||||
@@ -1,84 +1,102 @@
|
||||
typeset _agent_forwarding _ssh_env_cache
|
||||
# Get the filename to store/lookup the environment from
|
||||
ssh_env_cache="$HOME/.ssh/environment-$SHORT_HOST"
|
||||
|
||||
function _start_agent() {
|
||||
local lifetime
|
||||
zstyle -s :omz:plugins:ssh-agent lifetime lifetime
|
||||
# Check if ssh-agent is already running
|
||||
if [[ -f "$ssh_env_cache" ]]; then
|
||||
. "$ssh_env_cache" > /dev/null
|
||||
|
||||
# start ssh-agent and setup environment
|
||||
echo Starting ssh-agent...
|
||||
ssh-agent -s ${lifetime:+-t} ${lifetime} | sed 's/^echo/#echo/' >! $_ssh_env_cache
|
||||
chmod 600 $_ssh_env_cache
|
||||
. $_ssh_env_cache > /dev/null
|
||||
{
|
||||
[[ "$USERNAME" = root ]] && command ps ax || command ps x
|
||||
} | command grep ssh-agent | command grep -q $SSH_AGENT_PID && return 0
|
||||
fi
|
||||
|
||||
# Set a maximum lifetime for identities added to ssh-agent
|
||||
local lifetime
|
||||
zstyle -s :omz:plugins:ssh-agent lifetime lifetime
|
||||
|
||||
# start ssh-agent and setup environment
|
||||
echo Starting ssh-agent...
|
||||
ssh-agent -s ${lifetime:+-t} ${lifetime} | sed '/^echo/d' >! "$ssh_env_cache"
|
||||
chmod 600 "$ssh_env_cache"
|
||||
. "$ssh_env_cache" > /dev/null
|
||||
}
|
||||
|
||||
function _add_identities() {
local id line sig lines
local -a identities loaded_sigs loaded_ids not_loaded
zstyle -a :omz:plugins:ssh-agent identities identities
local id file line sig lines
local -a identities loaded_sigs loaded_ids not_loaded
zstyle -a :omz:plugins:ssh-agent identities identities

# check for .ssh folder presence
if [[ ! -d $HOME/.ssh ]]; then
return
fi
# check for .ssh folder presence
if [[ ! -d "$HOME/.ssh" ]]; then
return
fi

# add default keys if no identities were set up via zstyle
# this is to mimic the call to ssh-add with no identities
if [[ ${#identities} -eq 0 ]]; then
# key list found on `ssh-add` man page's DESCRIPTION section
for id in id_rsa id_dsa id_ecdsa id_ed25519 identity; do
# check if file exists
[[ -f "$HOME/.ssh/$id" ]] && identities+=$id
done
fi
# add default keys if no identities were set up via zstyle
# this is to mimic the call to ssh-add with no identities
if [[ ${#identities} -eq 0 ]]; then
# key list found on `ssh-add` man page's DESCRIPTION section
for id in id_rsa id_dsa id_ecdsa id_ed25519 identity; do
# check if file exists
[[ -f "$HOME/.ssh/$id" ]] && identities+=($id)
done
fi

# get list of loaded identities' signatures and filenames
if lines=$(ssh-add -l); then
for line in ${(f)lines}; do
loaded_sigs+=${${(z)line}[2]}
loaded_ids+=${${(z)line}[3]}
done
fi

# add identities if not already loaded
for id in $identities; do
# check for filename match, otherwise try for signature match
if [[ ${loaded_ids[(I)$HOME/.ssh/$id]} -le 0 ]]; then
sig="$(ssh-keygen -lf "$HOME/.ssh/$id" | awk '{print $2}')"
[[ ${loaded_sigs[(I)$sig]} -le 0 ]] && not_loaded+="$HOME/.ssh/$id"
fi
done
# add identities if not already loaded
for id in $identities; do
# if id is an absolute path, make file equal to id
[[ "$id" = /* ]] && file="$id" || file="$HOME/.ssh/$id"
# check for filename match, otherwise try for signature match
if [[ ${loaded_ids[(I)$file]} -le 0 ]]; then
sig="$(ssh-keygen -lf "$file" | awk '{print $2}')"
[[ ${loaded_sigs[(I)$sig]} -le 0 ]] && not_loaded+=("$file")
fi
done

local args
zstyle -a :omz:plugins:ssh-agent ssh-add-args args
[[ -n "$not_loaded" ]] && ssh-add "${args[@]}" ${^not_loaded}
# abort if no identities need to be loaded
if [[ ${#not_loaded} -eq 0 ]]; then
return
fi

# pass extra arguments to ssh-add
local args
zstyle -a :omz:plugins:ssh-agent ssh-add-args args

# use user specified helper to ask for password (ksshaskpass, etc)
local helper
zstyle -s :omz:plugins:ssh-agent helper helper

if [[ -n "$helper" ]]; then
if [[ -z "${commands[$helper]}" ]]; then
echo "ssh-agent: the helper '$helper' has not been found."
else
SSH_ASKPASS="$helper" ssh-add "${args[@]}" ${^not_loaded} < /dev/null
return $?
fi
fi

ssh-add "${args[@]}" ${^not_loaded}
}

# Get the filename to store/lookup the environment from
_ssh_env_cache="$HOME/.ssh/environment-$SHORT_HOST"

# test if agent-forwarding is enabled
zstyle -b :omz:plugins:ssh-agent agent-forwarding _agent_forwarding
zstyle -b :omz:plugins:ssh-agent agent-forwarding agent_forwarding

if [[ $_agent_forwarding == "yes" && -n "$SSH_AUTH_SOCK" ]]; then
# Add a nifty symlink for screen/tmux if agent forwarding
[[ -L $SSH_AUTH_SOCK ]] || ln -sf "$SSH_AUTH_SOCK" /tmp/ssh-agent-$USERNAME-screen
elif [[ -f "$_ssh_env_cache" ]]; then
# Source SSH settings, if applicable
. $_ssh_env_cache > /dev/null
if [[ $USERNAME == "root" ]]; then
FILTER="ax"
else
FILTER="x"
fi
ps $FILTER | grep ssh-agent | grep -q $SSH_AGENT_PID || {
_start_agent
}
# Add a nifty symlink for screen/tmux if agent forwarding
if [[ $agent_forwarding = "yes" && -n "$SSH_AUTH_SOCK" && ! -L "$SSH_AUTH_SOCK" ]]; then
ln -sf "$SSH_AUTH_SOCK" /tmp/ssh-agent-$USERNAME-screen
else
_start_agent
fi

_add_identities

# tidy up after ourselves
unset _agent_forwarding _ssh_env_cache
unset agent_forwarding ssh_env_cache
unfunction _start_agent _add_identities

@@ -15,48 +15,76 @@
# ------------------------------------------------------------------------------

__sudo-replace-buffer() {
local old=$1 new=$2 space=${2:+ }
if [[ ${#LBUFFER} -le ${#old} ]]; then
RBUFFER="${space}${BUFFER#$old }"
LBUFFER="${new}"
else
LBUFFER="${new}${space}${LBUFFER#$old }"
fi
}

sudo-command-line() {
# If line is empty, get the last run command from history
[[ -z $BUFFER ]] && LBUFFER="$(fc -ln -1)"

# Save beginning space
local WHITESPACE=""
if [[ ${LBUFFER:0:1} = " " ]]; then
WHITESPACE=" "
LBUFFER="${LBUFFER:1}"
fi

# If $EDITOR is not set, just toggle the sudo prefix on and off
if [[ -z "$EDITOR" ]]; then
case "$BUFFER" in
sudoedit\ *) __sudo-replace-buffer "sudoedit" "" ;;
sudo\ *) __sudo-replace-buffer "sudo" "" ;;
*) LBUFFER="sudo $LBUFFER" ;;
esac
else
# Check if the typed command is really an alias to $EDITOR

# Get the first part of the typed command
local cmd="${${(Az)BUFFER}[1]}"
# Get the first part of the alias of the same name as $cmd, or $cmd if no alias matches
local realcmd="${${(Az)aliases[$cmd]}[1]:-$cmd}"
# Get the first part of the $EDITOR command ($EDITOR may have arguments after it)
local editorcmd="${${(Az)EDITOR}[1]}"

# Note: ${var:c} makes a $PATH search and expands $var to the full path
# The if condition is met when:
# - $realcmd is '$EDITOR'
# - $realcmd is "cmd" and $EDITOR is "cmd"
# - $realcmd is "cmd" and $EDITOR is "cmd --with --arguments"
# - $realcmd is "/path/to/cmd" and $EDITOR is "cmd"
# - $realcmd is "/path/to/cmd" and $EDITOR is "/path/to/cmd"
# or
# - $realcmd is "cmd" and $EDITOR is "cmd"
# - $realcmd is "cmd" and $EDITOR is "/path/to/cmd"
# or
# - $realcmd is "cmd" and $EDITOR is /alternative/path/to/cmd that appears in $PATH
if [[ "$realcmd" = (\$EDITOR|$editorcmd|${editorcmd:c}) \
|| "${realcmd:c}" = ($editorcmd|${editorcmd:c}) ]] \
|| builtin which -a "$realcmd" | command grep -Fx -q "$editorcmd"; then
editorcmd="$cmd" # replace $editorcmd with the typed command so it matches below
fi

# Get the first part of the typed command and check if it's an alias to $EDITOR
# If so, locally change $EDITOR to the alias so that it matches below
if [[ -n "$EDITOR" ]]; then
local cmd="${${(Az)BUFFER}[1]}"
if [[ "${aliases[$cmd]} " = (\$EDITOR|$EDITOR)\ * ]]; then
local EDITOR="$cmd"
fi
fi
# Check for editor commands in the typed command and replace accordingly
case "$BUFFER" in
$editorcmd\ *) __sudo-replace-buffer "$editorcmd" "sudoedit" ;;
\$EDITOR\ *) __sudo-replace-buffer '$EDITOR' "sudoedit" ;;
sudoedit\ *) __sudo-replace-buffer "sudoedit" "$EDITOR" ;;
sudo\ *) __sudo-replace-buffer "sudo" "" ;;
*) LBUFFER="sudo $LBUFFER" ;;
esac
fi

if [[ -n $EDITOR && $BUFFER = $EDITOR\ * ]]; then
__sudo-replace-buffer "$EDITOR" "sudoedit"
elif [[ -n $EDITOR && $BUFFER = \$EDITOR\ * ]]; then
__sudo-replace-buffer "\$EDITOR" "sudoedit"
elif [[ $BUFFER = sudoedit\ * ]]; then
__sudo-replace-buffer "sudoedit" "$EDITOR"
elif [[ $BUFFER = sudo\ * ]]; then
__sudo-replace-buffer "sudo" ""
else
LBUFFER="sudo $LBUFFER"
fi
# Preserve beginning space
LBUFFER="${WHITESPACE}${LBUFFER}"
# Redisplay edit buffer (compatibility with zsh-syntax-highlighting)
zle redisplay
}

zle -N sudo-command-line

@@ -1 +1,14 @@
# DECLARATION: This plugin was created by hhatto. All I did was port it from https://bitbucket.org/hhatto/zshcompfunc4supervisor.

alias sup='sudo supervisorctl'
alias supad='sudo supervisorctl add'
alias supa='sudo supervisorctl avail'
alias suprl='sudo supervisorctl reload'
alias suprm='sudo supervisorctl remove'
alias suprr='sudo supervisorctl reread'
alias suprs='sudo supervisorctl restart'
alias sups='sudo supervisorctl status'
alias supsr='sudo supervisorctl start'
alias supso='sudo supervisorctl stop'
alias supt='sudo supervisorctl tail'
alias supu='sudo supervisorctl update'

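With these aliases loaded, a typical round trip looks like this (a sketch only; `myapp` is a hypothetical program name managed by supervisord):

```zsh
sups myapp    # expands to: sudo supervisorctl status myapp
suprs myapp   # expands to: sudo supervisorctl restart myapp
```
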
@@ -2,9 +2,9 @@

**Maintainer**: [r-darwish](https://github.com/r-darwish)

Aliases for Zypper based on the official Zypper aliases.

To use it, add `suse` to the plugins array in your zshrc file.

```zsh
plugins=(... suse)
@@ -60,6 +60,12 @@ plugins=(... suse)
| zse | `zypper se` | search for packages |
| zwp | `zypper wp` | list all packages providing the specified capability |

NOTE: `--no-refresh` is passed to zypper to speed up the calls and avoid errors due to lack
of root privileges. If you need to refresh the repositories, call `sudo zypper ref` (`zref` alias)
before running these aliases.

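A short sketch of that workflow (the package name `ripgrep` is only an example):

```zsh
zref          # sudo zypper ref           -- refresh repositories first
zse ripgrep   # zypper --no-refresh se …  -- then search without refreshing again
```
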
Related: [#9798](https://github.com/ohmyzsh/ohmyzsh/pull/9798).

## Repositories commands

| Alias | Commands | Description |

@@ -25,16 +25,16 @@ alias zup='sudo zypper up'
alias zpatch='sudo zypper patch'

#Request commands
alias zif='zypper if'
alias zpa='zypper pa'
alias zpatch-info='zypper patch-info'
alias zpattern-info='zypper pattern-info'
alias zproduct-info='zypper product-info'
alias zpch='zypper pch'
alias zpd='zypper pd'
alias zpt='zypper pt'
alias zse='zypper se'
alias zwp='zypper wp'
alias zif='zypper --no-refresh if'
alias zpa='zypper --no-refresh pa'
alias zpatch-info='zypper --no-refresh patch-info'
alias zpattern-info='zypper --no-refresh pattern-info'
alias zproduct-info='zypper --no-refresh product-info'
alias zpch='zypper --no-refresh pch'
alias zpd='zypper --no-refresh pd'
alias zpt='zypper --no-refresh pt'
alias zse='zypper --no-refresh se'
alias zwp='zypper --no-refresh wp'

#Repositories commands
alias zar='sudo zypper ar'

@@ -5,7 +5,8 @@ if [[ -z $commands[thefuck] ]]; then
fi

# Register alias
eval "$(thefuck --alias)"
[[ ! -a $ZSH_CACHE_DIR/thefuck ]] && thefuck --alias > $ZSH_CACHE_DIR/thefuck
source $ZSH_CACHE_DIR/thefuck

fuck-command-line() {
local FUCK="$(THEFUCK_REQUIRE_CONFIRMATION=0 thefuck $(fc -ln -1 | tail -n 1) 2> /dev/null)"

@@ -15,6 +15,9 @@ alias agli='apt list --installed'
# List available updates only
alias aglu='apt list --upgradable'

alias acsp='apt-cache showpkg'
compdef _acsp acsp='apt-cache showpkg'

# superuser operations ######################################################

alias afu='sudo apt-file update'

@@ -69,7 +69,7 @@ __box_list ()

__vm_list ()
{
_wanted application expl 'command' compadd $(command grep "${VAGRANT_CWD:-.}/Vagrantfile" -oe '^[^#]*\.vm\.define *[:"]\([a-zA-Z0-9_-]\+\)' 2>/dev/null | awk '{print substr($2, 2)}')
_wanted application expl 'command' compadd $(command grep "${VAGRANT_CWD:-.}/Vagrantfile" -oe '^[^#]*\.vm\.define *[:"]\([a-zA-Z0-9\._-]\+\)' 2>/dev/null | awk '{print substr($2, 2)}')
_wanted application expl 'command' compadd $(command ls "${VAGRANT_CWD:-.}/.vagrant/machines/" 2>/dev/null)
}


@@ -22,7 +22,8 @@ EOH
local cmd=""
local before="<esc>"
local after=""
local name="GVIM"
# Look up the newest instance
local name="$(gvim --serverlist | tail -n 1)"
while getopts ":b:a:n:" option
do
case $option in

@@ -35,27 +35,19 @@ if [[ ! $DISABLE_VENV_CD -eq 1 ]]; then
function workon_cwd {
if [[ -z "$WORKON_CWD" ]]; then
local WORKON_CWD=1
# Check if this is a Git repo
local GIT_REPO_ROOT=""
local GIT_TOPLEVEL="$(git rev-parse --show-toplevel 2> /dev/null)"
if [[ $? == 0 ]]; then
GIT_REPO_ROOT="$GIT_TOPLEVEL"
fi
# Get absolute path, resolving symlinks
local PROJECT_ROOT="${PWD:A}"
while [[ "$PROJECT_ROOT" != "/" && ! -e "$PROJECT_ROOT/.venv" \
&& ! -d "$PROJECT_ROOT/.git" && "$PROJECT_ROOT" != "$GIT_REPO_ROOT" ]]; do
&& ! -d "$PROJECT_ROOT/.git" ]]; do
PROJECT_ROOT="${PROJECT_ROOT:h}"
done
if [[ "$PROJECT_ROOT" == "/" ]]; then
PROJECT_ROOT="."
fi

# Check for virtualenv name override
if [[ -f "$PROJECT_ROOT/.venv" ]]; then
ENV_NAME="$(cat "$PROJECT_ROOT/.venv")"
elif [[ -f "$PROJECT_ROOT/.venv/bin/activate" ]];then
ENV_NAME="$PROJECT_ROOT/.venv"
elif [[ "$PROJECT_ROOT" != "." ]]; then
elif [[ "$PROJECT_ROOT" != "/" ]]; then
ENV_NAME="${PROJECT_ROOT:t}"
else
ENV_NAME=""
@@ -68,14 +60,21 @@ if [[ ! $DISABLE_VENV_CD -eq 1 ]]; then
fi
if [[ "$ENV_NAME" != "" ]]; then
# Activate the environment only if it is not already active
if [[ "$VIRTUAL_ENV" != "$WORKON_HOME/$ENV_NAME" ]]; then
if [[ ! "$VIRTUAL_ENV" -ef "$WORKON_HOME/$ENV_NAME" ]]; then
if [[ -e "$WORKON_HOME/$ENV_NAME/bin/activate" ]]; then
workon "$ENV_NAME" && export CD_VIRTUAL_ENV="$ENV_NAME"
elif [[ -e "$ENV_NAME/bin/activate" ]]; then
source $ENV_NAME/bin/activate && export CD_VIRTUAL_ENV="$ENV_NAME"
else
ENV_NAME=""
fi
fi
fi
if [[ "$ENV_NAME" == "" && -n $CD_VIRTUAL_ENV && -n $VIRTUAL_ENV ]]; then
# We've just left the repo, deactivate the environment
# Note: this only happens if the virtualenv was activated automatically
deactivate && unset CD_VIRTUAL_ENV
fi
fi
}


14
zsh/plugins/zoxide/README.md
Normal file
@@ -0,0 +1,14 @@
# zoxide plugin

Initializes [zoxide](https://github.com/ajeetdsouza/zoxide), a smarter cd
command for your terminal.



To use it, add `zoxide` to the plugins array in your `.zshrc` file:

```zsh
plugins=(... zoxide)
```

**Note:** you have to [install zoxide](https://github.com/ajeetdsouza/zoxide#step-1-install-zoxide) first.
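
Once initialized, zoxide's `z` (and interactive `zi`) commands can replace `cd`. A brief sketch, where `~/projects/foo` stands for any directory you have visited before:

```zsh
z foo    # jump to the highest-ranked directory matching "foo", e.g. ~/projects/foo
zi foo   # same, but choose the match interactively (typically via fzf)
```
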
5
zsh/plugins/zoxide/zoxide.plugin.zsh
Normal file
@@ -0,0 +1,5 @@
if (( $+commands[zoxide] )); then
eval "$(zoxide init zsh)"
else
echo '[oh-my-zsh] zoxide not found, please install it from https://github.com/ajeetdsouza/zoxide'
fi