I'm trying to define a convenience function in my ~/.bashrc file and then source that file. However, when I source ~/.bashrc and call the function it defines, it doesn't work. What exactly am I doing wrong? The function is:
# tensorflow object detection train call
train_net()
{
    python /root/training/model_main.py \
        --pipeline_config_path=/root/training/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync.config \
        --model_dir=/root/training/models \
        --num_train_steps=100000 \
        --sample_1_of_n_eval_examples=1 \
        --logtostderr=true
}
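For reference, this is roughly what I do in the interactive shell after editing the file (just a sketch; type and declare -f are the bash builtins I use to check whether the shell actually picked the function up):

# reload the rc file in the current interactive shell
source ~/.bashrc

# ask bash whether the name now resolves to a function
type train_net        # should report "train_net is a function" and print its body
declare -f train_net  # alternative check: prints the definition if it exists

# then call it
train_net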
Hmm. OK, thanks, I'll fix up this post to match a minimal/complete example as soon as the currently running process finishes. By the way, my entire ~/.bashrc is:
# ~/.bashrc: executed by bash(1) for non-login shells.
# see /usr/share/doc/bash/examples/startup-files (in the package bash-doc)
# for examples
# If not running interactively, don't do anything
[ -z "$PS1" ] && return
# don't put duplicate lines in the history. See bash(1) for more options
# ... or force ignoredups and ignorespace
HISTCONTROL=ignoredups:ignorespace
# append to the history file, don't overwrite it
shopt -s histappend
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
HISTSIZE=1000
HISTFILESIZE=2000
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# make less more friendly for non-text input files, see lesspipe(1)
[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)"
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set a fancy prompt (non-color, unless we know we "want" color)
case "$TERM" in
xterm-color) color_prompt=yes;;
esac
# uncomment for a colored prompt, if the terminal has the capability; turned
# off by default to not distract the user: the focus in a terminal window
# should be on the output of commands, not on the prompt
#force_color_prompt=yes
if [ -n "$force_color_prompt" ]; then
if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then
# We have color support; assume it's compliant with Ecma-48
# (ISO/IEC-6429). (Lack of such support is extremely rare, and such
# a case would tend to support setf rather than setaf.)
color_prompt=yes
else
color_prompt=
fi
fi
if [ "$color_prompt" = yes ]; then
PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
else
PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ '
fi
unset color_prompt force_color_prompt
# If this is an xterm set the title to user@host:dir
case "$TERM" in
xterm*|rxvt*)
PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
;;
*)
;;
esac
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
    test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
    alias ls='ls --color=auto'
    #alias dir='dir --color=auto'
    #alias vdir='vdir --color=auto'
    alias grep='grep --color=auto'
    alias fgrep='fgrep --color=auto'
    alias egrep='egrep --color=auto'
fi
# some more ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
# Alias definitions.
# You may want to put all your additions into a separate file like
# ~/.bash_aliases, instead of adding them here directly.
# See /usr/share/doc/bash-doc/examples in the bash-doc package.
if [ -f ~/.bash_aliases ]; then
    . ~/.bash_aliases
fi
# enable programmable completion features (you don't need to enable
# this, if it's already enabled in /etc/bash.bashrc and /etc/profile
# sources /etc/bash.bashrc).
#if [ -f /etc/bash_completion ] && ! shopt -oq posix; then
# . /etc/bash_completion
#fi
. /opt/conda/etc/profile.d/conda.sh
conda activate base
export PYTHONPATH=/tensorflow/models/research:/tensorflow/models/research/slim
export NO_AT_BRIDGE=1
# launch Jupyter Notebook on port 8888
jp()
{
    jupyter notebook --ip 0.0.0.0 --no-browser --allow-root --port=8888
}
# launch JupyterLab on port 8888
jl()
{
    jupyter lab --ip 0.0.0.0 --no-browser --allow-root --port=8888
}
# Run from the tensorflow/models/research/ directory
train_net()
{
    python /root/training/model_main.py \
        --pipeline_config_path=/root/training/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync.config \
        --model_dir=/root/training/models \
        --num_train_steps=100000 \
        --sample_1_of_n_eval_examples=1 \
        --logtostderr=true
}
# Run from the tensorflow/models/research/ directory
eval_net()
{
    python /root/training/model_main.py \
        --pipeline_config_path=/root/training/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync.config \
        --model_dir=/root/training/models \
        --checkpoint_dir=/root/training/models/train \
        --sample_1_of_n_eval_examples=1 \
        --eval_training_data=False \
        --sample_1_of_n_eval_on_train_examples=5 \
        --logtostderr=true
}
# Run TensorBoard
tb(){
    tensorboard --logdir=/root/training/models
}
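One thing I wasn't sure about while posting (just sketching the scenario, not claiming it's the cause): the [ -z "$PS1" ] && return guard near the top makes bash stop reading the file whenever it is sourced by a non-interactive shell, so everything defined below it, including train_net, would be skipped in that case. For example:

# interactive terminal: PS1 is set, the guard is passed, so the function exists
source ~/.bashrc
type train_net        # -> "train_net is a function ..."

# non-interactive shell: PS1 is empty, the early "return" fires first,
# so train_net never gets defined in that shell
bash -c 'source ~/.bashrc; type train_net'   # -> reports train_net not found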