Mirror of https://github.com/hmemcpy/milewski-ctfp-pdf.git, synced 2024-11-25 07:52:03 +03:00
refactor: CI and nix (#306)
* fix: remove custom fonts
  Since they are available in Nix, there is no need to keep them in the project anymore
* chore: remove old obsolete files
* refactor: rewrite Nix files
  - Switch from `numtide/flake-utils` to `flake-parts`
  - Add custom font derivation for LaTeX
  - Add `formatter`
  - Switch to `python311`
* ci: update Github workflows
* feat: add `Makefile` for local development
  Very useful when used in combination with `nix develop`
* feat: add `.envrc` file for loading development environment with `nix-direnv`
* feat: add `.editorconfig` and `.prettierrc`
* style: reformat files using `prettier`
  Run `nix run nixpkgs#nodePackages.prettier -- --write .`
* fix: add workaround to prevent bug with `minted` package
  See https://github.com/gpoore/minted/issues/353 for context
* fix: add `version.tex` in the repo
* chore: rewrite `README`
* chore: ignore LaTeX temporary files while building locally
* feat: add `latexindent.pl` configuration file
* style: lint LaTeX files
This commit is contained in:
parent
98b71ac267
commit
de799935b2
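The commit message above describes the intended local workflow. As a rough sketch (assuming a Nix installation with flakes enabled, as described in the updated `README.md`), day-to-day development looks like this:

```bash
# Enter a shell with TeX Live, Python/Pygments, git and GNU make available
nix develop

# Build an edition with the new top-level Makefile (wraps latexmk)
make ctfp-scala

# Keep files formatted the same way CI checks them
nix fmt .                                           # Nix files (alejandra)
nix run nixpkgs#nodePackages.prettier -- --write .  # Markdown/YAML/JSON
```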
21  .editorconfig  Normal file
@@ -0,0 +1,21 @@
root = true

[*]
indent_size = 4
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[Makefile]
indent_style = tab

[*.{tex,cls,lua,nix}]
indent_style = space
indent_size = 2
max_line_length = 80

[*.md]
trim_trailing_whitespace = false
indent_size = 2
max_line_length = 80
51  .github/settings.yml  vendored  Normal file
@@ -0,0 +1,51 @@
# https://github.com/probot/settings

branches:
  - name: master
    protection:
      enforce_admins: false
      required_pull_request_reviews:
        dismiss_stale_reviews: true
        require_code_owner_reviews: true
        required_approving_review_count: 1
      restrictions: null
      required_linear_history: true
      required_status_checks:
        strict: true

labels:
  - name: typo
    color: ee0701

  - name: dependencies
    color: 0366d6

  - name: enhancement
    color: 0e8a16

  - name: question
    color: cc317c

  - name: security
    color: ee0701

  - name: stale
    color: eeeeee

repository:
  allow_merge_commit: true
  allow_rebase_merge: true
  allow_squash_merge: true
  default_branch: master
  description:
    "Bartosz Milewski's 'Category Theory for Programmers' unofficial PDF and
    LaTeX sources"
  homepage: https://bartoszmilewski.com/2014/10/28/category-theory-for-programmers-the-preface/
  topics: pdf,haskell,scala,latex,cpp,functional-programming,ocaml,category-theory
  has_downloads: true
  has_issues: true
  has_pages: false
  has_projects: false
  has_wiki: false
  name: milewski-ctfp-pdf
  private: false
117  .github/workflows/build.yaml  vendored  (deleted)
@@ -1,117 +0,0 @@
name: Build PDFs

on:
  - push

jobs:
  dependencies:
    name: Build dependencies
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.version.outputs.version }}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 1

      - name: Create global variables
        id: version
        run: echo "::set-output name=version::$(git rev-parse --short HEAD)"

  determine-matrix:
    name: Figure out the packages we need to build
    runs-on: ubuntu-latest
    needs: [dependencies]

    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 1

      - name: Install the Nix package manager
        uses: cachix/install-nix-action@v16

      - id: set-matrix
        run: |
          echo "::set-output name=matrix::$(
            nix eval --json --impure \
              --expr 'builtins.attrNames (import ./.).packages.x86_64-linux'
          )"

  build:
    name: Build documents
    needs: determine-matrix
    runs-on: ubuntu-latest
    strategy:
      matrix:
        packages: ${{fromJson(needs.determine-matrix.outputs.matrix)}}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 1

      - name: Install Nix
        uses: cachix/install-nix-action@v16

      - name: Build ${{ matrix.packages }}.pdf
        run: |
          nix build .#${{ matrix.packages }}
          mkdir -p out
          cp -ar ./result/* out/

      - name: Upload build assets (${{ matrix.packages }}.pdf)
        uses: actions/upload-artifact@v2
        with:
          name: ctfp
          path: out/*

  release:
    name: "Create Github tag/pre-release"
    runs-on: ubuntu-latest
    needs: [dependencies, build]
    outputs:
      upload_url: ${{ steps.create_release.outputs.upload_url }}
    steps:
      - name: Create Github pre-release (${{ needs.dependencies.outputs.version }})
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: v${{ github.run_number }}-${{ needs.dependencies.outputs.version }}
          release_name: Version ${{ github.run_number }} (${{ needs.dependencies.outputs.version }})
          draft: false
          prerelease: true

  assets:
    name: Upload release assets
    runs-on: ubuntu-latest
    needs: [determine-matrix, dependencies, release]
    strategy:
      matrix:
        packages: ${{fromJson(needs.determine-matrix.outputs.matrix)}}

    steps:
      - name: Download build assets (${{ matrix.packages }}.pdf)
        uses: actions/download-artifact@v2
        with:
          name: ctfp
          path: ctfp

      - name: Upload release assets (${{ matrix.packages }}--${{ needs.dependencies.outputs.version }}.pdf)
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ needs.release.outputs.upload_url }}
          asset_path: ctfp/${{ matrix.packages }}.pdf
          asset_name: ${{ matrix.packages }}--${{ needs.dependencies.outputs.version }}.pdf
          asset_content_type: application/pdf
64  .github/workflows/nix-flake-check.yaml  vendored  Normal file
@@ -0,0 +1,64 @@
name: Check and build

on:
  pull_request:
  push:
    branches:
      - master
jobs:
  dependencies:
    name: Build dependencies
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.version.outputs.version }}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v3

      - name: Create global variables
        id: version
        run:
          echo "version=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT

  determine-matrix:
    name: Figure out the packages we need to build
    runs-on: ubuntu-latest
    needs: [dependencies]

    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v3

      - name: Install the Nix package manager
        uses: cachix/install-nix-action@v18

      - id: set-matrix
        run: |
          echo "matrix=$(
            nix eval --json .#packages.x86_64-linux --apply builtins.attrNames
          )" >> $GITHUB_OUTPUT

  build:
    name: Build
    needs: determine-matrix
    runs-on: ubuntu-latest
    strategy:
      matrix:
        packages: ${{fromJson(needs.determine-matrix.outputs.matrix)}}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v3

      - name: Install Nix
        uses: cachix/install-nix-action@v18

      - name: Nix flake check
        run: nix flake check

      - name: Build ${{ matrix.packages }}.pdf
        run: nix build .#${{ matrix.packages }}
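The `set-matrix` step above can be reproduced locally to preview which build jobs CI will fan out into; a sketch (output abbreviated and illustrative — the names follow the `ctfp${suffix}` scheme from `flake.nix`):

```bash
# Same expression the workflow runs; prints a JSON list of package names
nix eval --json .#packages.x86_64-linux --apply builtins.attrNames
# e.g. ["ctfp","ctfp-ocaml","ctfp-print","ctfp-print-ocaml", ...]
```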
17  .github/workflows/nix-fmt-checks.yaml  vendored  Normal file
@@ -0,0 +1,17 @@
name: Nix formatter checks

on:
  pull_request:

jobs:
  format-check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Install Nix
        uses: cachix/install-nix-action@v18

      - name: Run nix formatter tool
        run: nix fmt . -- --check
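To avoid failing this check, the same formatter can be run locally before pushing (the check-only form below mirrors the CI step):

```bash
nix fmt . -- --check   # what CI runs; fails if any Nix file is unformatted
nix fmt .              # rewrite the files in place instead
```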
18  .github/workflows/prettier-checks.yaml  vendored  Normal file
@@ -0,0 +1,18 @@
name: Prettier checks

on:
  pull_request:

jobs:
  prettier:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Install the Nix package manager
        uses: cachix/install-nix-action@v18

      - name: Checks
        run: nix run nixpkgs#nodePackages.prettier -- --check .
99  .github/workflows/release.yaml  vendored  Normal file
@@ -0,0 +1,99 @@
name: Release PDFs

on:
  push:
    tags:
      - "**"

jobs:
  determine-matrix:
    name: Figure out the assets we need to build
    runs-on: ubuntu-latest

    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v3

      - name: Install the Nix package manager
        uses: cachix/install-nix-action@v18

      - id: set-matrix
        run: |
          echo "matrix=$(
            nix eval --json --impure \
              --expr 'builtins.filter (x: (null == builtins.match "(.*)-nts" x)) (builtins.attrNames (import ./.).packages.x86_64-linux)'
          )" >> $GITHUB_OUTPUT

  build:
    name: Build documents
    needs: determine-matrix
    runs-on: ubuntu-latest
    strategy:
      matrix:
        packages: ${{fromJson(needs.determine-matrix.outputs.matrix)}}

    steps:
      - name: Set up Git repository
        uses: actions/checkout@v3

      - name: Install Nix
        uses: cachix/install-nix-action@v18

      - name: Build ${{ matrix.packages }}.pdf
        run: |
          nix build .#${{ matrix.packages }}
          mkdir -p out
          cp -ar ./result/* out/

      - name: Upload build assets (${{ matrix.packages }}.pdf)
        uses: actions/upload-artifact@v2
        with:
          name: ctfp
          path: out/*

  release:
    name: "Create Github pre-release"
    runs-on: ubuntu-latest
    needs: [build]
    outputs:
      upload_url: ${{ steps.create_release.outputs.upload_url }}
    steps:
      - name: Create Github pre-release (${{ github.ref }})
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: ${{ github.ref }}
          draft: true

  assets:
    name: Upload release assets
    runs-on: ubuntu-latest
    needs: [determine-matrix, release]
    strategy:
      matrix:
        packages: ${{fromJson(needs.determine-matrix.outputs.matrix)}}

    steps:
      - name: Download build assets (${{ matrix.packages }}.pdf)
        uses: actions/download-artifact@v2
        with:
          name: ctfp
          path: ctfp

      - name:
          Upload release assets (${{ matrix.packages }}--${{ github.ref
          }}.pdf)
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ needs.release.outputs.upload_url }}
          asset_path: ctfp/${{ matrix.packages }}.pdf
          asset_name: ${{ matrix.packages }}--${{ github.ref }}.pdf
          asset_content_type: application/pdf
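Unlike the old `build.yaml`, this workflow only fires on tag pushes (`on.push.tags: "**"`). A minimal sketch of cutting a release, with `v1.3.1` standing in as a hypothetical tag name:

```bash
git tag v1.3.1          # hypothetical tag name; any tag matches "**"
git push origin v1.3.1  # triggers "Release PDFs", which drafts a Github pre-release with the PDFs attached
```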
28  .gitignore  vendored
@@ -1,16 +1,26 @@
-.vscode/
-*.fls
+/.vscode/
+/.direnv/
+/build/
+/result
+.DS_Store
+src/.dotty-ide-disabled
+src/.metals
+out/
 _minted*
-*.fdb_latexmk
+*.fls
 out/*
+*.fdb_latexmk
+*.bak*
+*.log
 *.pdf
 *.xdv
 *.gz
-src/version.tex
 *.pyg
 *.synctex(busy)
-src/.dotty-ide-disabled
-venv/*
-.DS_Store
-src/.metals
-out/
+*.aux
+*.idx
+*.ilg
+*.lig
+*.ind
+*.out
+*.toc
7  .latexindent.yaml  Normal file
@@ -0,0 +1,7 @@
defaultIndent: " "
verbatimEnvironments:
  verbatim: 1
  lstlisting: 1
  minted: 1
  snip: 1
  snipv: 1
6  .prettierignore  Normal file
@@ -0,0 +1,6 @@
/.direnv/
/.idea/
/vendor/
/docs/
/build/
CHANGELOG.md
3  .prettierrc  Normal file
@@ -0,0 +1,3 @@
{
  "proseWrap": "always"
}
32  .travis.yml  (deleted)
@@ -1,32 +0,0 @@
language:
  - nix

script:
  - nix-shell --pure --command 'cd src; make all'

cache:
  directories:
    src/_minted-ctfp

deploy:
  - provider: releases
    api_key:
      secure: usNWQZc/HcrZxR72+Qz2YXeyf56h0EwLVl6oOXIivxTWQCTTQ0cE6tXG6lP8JxQ1qlfAhbdh4QOmz6jxQD60HxsUwDWmt9t51o+8n3EMeID0hc45ES40/mbieUfR1jbkLbwmL16Z/bWsvUErmGuUgOxYrUCplK9fWs7Vvt8xmReFdunw8XpVrywG4rl1jXDo9YWihMps5KLOqd3mY3yvlxAmB+UKkhHPNdcEuEghEBhC8HZoZkFiwfRw/8PeOh9VpnQ2ht9eDXOlB7zNYY9Xr/S98FbzfZXxFjApXVRgNP8k2UYyHn8HCmIoCSs+Jv06tEvNCuwTvj0JNsuoW7vu/Q7wrrpScfDL+WnSw2CScml+xAe7Q4caoZKkzaMCnj5fHbPEZ731+SLZNbG6TMTYhMqhFm0Fr87bwdNlayqAssIhOwU4ca3pnZOQFO4vNOWblNfbHsX5F9sJDOR0uD4Y+PgfNWgTsssXqei7owBJNTe+qz5Q7IaFA3A8EMp57CV1dUCgmjxVuugtz6DLpl16WGiWpqakIA900GXMG+2c4ENCCDWmYtGlpWs2lSBqRYHV1gncE8UGtIih8G6g5jQd2aJUQKHKuaEiv/28WLI7f2exUuOBmc0ce4xP+qZAs5XWiPo4jJLNyb81uNIZ0jCb/c5+lEH8EsYF+jFOrmw17GQ=
    file: out/**/*.pdf
    file_glob: true
    skip_cleanup: true
    on:
      repo: hmemcpy/milewski-ctfp-pdf
      branch: master
      tags: true
  - provider: s3
    bucket: milewski-ctfp-pdf
    access_key_id: '$ARTIFACTS_KEY'
    secret_access_key: '$ARTIFACTS_SECRET'
    region: us-east-1
    acl: public_read
    local_dir: out
    skip_cleanup: true
    on:
      all_branches: true
      repo: hmemcpy/milewski-ctfp-pdf
31  Makefile  Normal file
@@ -0,0 +1,31 @@
OUTPUT ?= $(shell basename "$(shell dirname "$(INPUT)")")
OUTPUT_DIRECTORY = $(shell pwd)/build
LATEXMK_ARGS ?= -f -file-line-error -shell-escape -logfilewarninglist -interaction=nonstopmode -halt-on-error -norc -pdflatex="xelatex %O %S" -pdfxe
TEXINPUTS = ""
TEXLIVE_RUN = TEXINPUTS=$(TEXINPUTS)
LATEXMK_COMMAND = $(TEXLIVE_RUN) latexmk $(LATEXMK_ARGS)

# Make does not offer a recursive wildcard function, so here's one:
rwildcard=$(wildcard $1$2) $(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2))

ctfp:
	cd src; $(LATEXMK_COMMAND) -jobname=ctfp ctfp-reader.tex

ctfp-ocaml:
	cd src; $(LATEXMK_COMMAND) -jobname=ctfp-ocaml ctfp-reader-ocaml.tex

ctfp-scala:
	cd src; $(LATEXMK_COMMAND) -jobname=ctfp-scala ctfp-reader-scala.tex

ctfp-print:
	cd src; $(LATEXMK_COMMAND) -jobname=ctfp-print ctfp-print.tex

ctfp-print-ocaml:
	cd src; $(LATEXMK_COMMAND) -jobname=ctfp-print-ocaml ctfp-print-ocaml.tex

ctfp-print-scala:
	cd src; $(LATEXMK_COMMAND) -jobname=ctfp-print-scala ctfp-print-scala.tex

lint:
	$(foreach file, $(call rwildcard,$(shell dirname "$(INPUT)"),*.tex), latexindent -l -w $(file);)
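The Makefile is a thin wrapper around `latexmk`, one target per edition. A short usage sketch (run inside `nix develop` so `latexmk`, the fonts and Pygments are available; the `INPUT` variable used by `lint` is assumed to point at a `.tex` file inside the directory tree you want linted):

```bash
make ctfp          # reader edition, from src/ctfp-reader.tex
make ctfp-scala    # Scala edition
make ctfp-print    # print layout

# Run latexindent over every .tex file under INPUT's directory
make lint INPUT=src/ctfp-reader.tex
```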
140  README.md
@@ -1,76 +1,106 @@
-Category Theory for Programmers
-====
-![image](https://user-images.githubusercontent.com/601206/43392303-f770d7be-93fb-11e8-8db8-b7e915b435ba.png)
-<b>Direct link: [category-theory-for-programmers.pdf](https://github.com/hmemcpy/milewski-ctfp-pdf/releases/download/v1.3.0/category-theory-for-programmers.pdf)</b>
-(Latest release: v1.3.0, August 2019. See [releases](https://github.com/hmemcpy/milewski-ctfp-pdf/releases) for additional formats and languages.)
-
-[![Build Status](https://travis-ci.org/hmemcpy/milewski-ctfp-pdf.svg?branch=master)](https://travis-ci.org/hmemcpy/milewski-ctfp-pdf)
-[(latest CI build)](https://s3.amazonaws.com/milewski-ctfp-pdf/category-theory-for-programmers.pdf)
-
-<img src="https://user-images.githubusercontent.com/601206/47271389-8eea0900-d581-11e8-8e81-5b932e336336.png"
-alt="Buy Category Theory for Programmers" width=410 />
-**[Available in full-color hardcover print](https://www.blurb.com/b/9621951-category-theory-for-programmers-new-edition-hardco)**
-Publish date: 12 August, 2019. Based off release tag [v1.3.0](https://github.com/hmemcpy/milewski-ctfp-pdf/releases/tag/v1.3.0). See [errata-1.3.0](errata-1.3.0.md) for changes and fixes since print.
-
-**[Scala Edition is now available in paperback](https://www.blurb.com/b/9603882-category-theory-for-programmers-scala-edition-pape)**
-Publish date: 12 August, 2019. Based off release tag [v1.3.0](https://github.com/hmemcpy/milewski-ctfp-pdf/releases/tag/v1.3.0). See [errata-scala](errata-scala.md) for changes and fixes since print.
-
-This is an *unofficial* PDF version of "Category Theory for Programmers" by Bartosz Milewski, converted from his [blogpost series](https://bartoszmilewski.com/2014/10/28/category-theory-for-programmers-the-preface/) (with permission!)
-
----
-
-Building
---------
-
-The best way to build the book is using the [Nix](https://nixos.org/nix/) package manager. After [installing Nix](https://nixos.org/download.html), if you're using a non-NixOS operating system, you need to install `nixFlakes` in your environment following the steps below ([source](https://nixos.wiki/wiki/Flakes#Non-NixOS)):
-
-```bash
-$ nix-env -iA nixpkgs.nixFlakes
-```
-
-Edit either `~/.config/nix/nix.conf` or `/etc/nix/nix.conf` and add:
-
-```
-experimental-features = nix-command flakes
-```
-
-This is needed to expose the Nix 2.0 CLI and flakes support that are hidden behind feature-flags.
-
-Also, if the Nix installation is in multi-user mode, don’t forget to restart the nix-daemon.
-
-Afterwards, type `nix flake show` in the root directory of the project to see all the available versions of this book. Then type `nix build .#<edition>` to build the edition you want (Haskell, Scala, OCaml, Reason and their printed versions). For example, to build the Scala edition you'll have to type `nix build .#ctfp-scala`.
-
-Upon successful compilation, the PDF file will be placed in the `result` directory inside the root directory `milewski-ctfp-pdf` of the repository.
-
-The file `preamble.tex` contains all the configuration and style declarations.
-
-Acknowledgements
-----------------
-
-PDF LaTeX source and the tools to create it are based on the work by Andres Raba et al., available here: https://github.com/sarabander/sicp-pdf.
-The book content is taken, with permission, from Bartosz Milewski's blogpost series, and adapted to the LaTeX format.
-
-Thanks to the following people for contributing corrections/conversions and misc:
-
-* Oleg Rakitskiy
-* Jared Weakly
-* Paolo G. Giarrusso
-* Adi Shavit
-* Mico Loretan
-* Marcello Seri
-* Erwin Maruli Tua Pakpahan
-* Markus Hauck
-* Yevheniy Zelenskyy
-* Ross Kirsling
-* ...and many others!
-
-The original blog post acknowledgments by Bartosz are consolidated in the *Acknowledgments* page at the end of the book.
-
-**Note from Bartosz**: I really appreciate all your contributions. You made this book much better than I could have imagined. Thank you!
-
-License
--------
-
-The PDF book, `.tex` files, and associated images and figures in directories `src/fig` and `src/content` are licensed under Creative Commons Attribution-ShareAlike 4.0 International License ([cc by-sa](http://creativecommons.org/licenses/by-sa/4.0/)).
-The script files `scraper.py` and others are licensed under GNU General Public License version 3 (for details, see [LICENSE](https://github.com/hmemcpy/milewski-ctfp-pdf/blob/master/LICENSE)).
+![GitHub stars][github stars]
+[![GitHub Workflow Status][github workflow status]][github actions link]
+[![Download][download badge]][github latest release]
+[![License][license badge]][github latest release]
+
+# Category Theory For Programmers
+
+An _unofficial_ PDF version of "**C**ategory **T**heory **F**or **P**rogrammers"
+by [Bartosz Milewski][bartosz github], converted from his [blogpost
+series][blogpost series] (_with permission!_).
+
+![Category Theory for Programmers][ctfp image]
+
+## Buy the book
+
+- **[Standard edition in full-color hardcover
+  print][buy regular edition on blurb]**
+  - Publish date: 12 August, 2019.
+  - Based off release tag [v1.3.0][v1.3.0 github release link]. See
+    [errata-1.3.0](errata-1.3.0.md) for changes and fixes since print.
+- **[Scala Edition in paperback][buy scala edition on blurb]**
+  - Publish date: 12 August, 2019.
+  - Based off release tag [v1.3.0][v1.3.0 github release link]. See
+    [errata-scala](errata-scala.md) for changes and fixes since print.
+
+## Build the book
+
+The building workflow requires [Nix][nix website]. After [installing
+Nix][nix download website], you need to enable the upcoming "flake" feature,
+which must be [enabled manually][nixos wiki flake] for the time being. This is
+needed to expose the new Nix commands and flakes support that are hidden behind
+feature-flags.
+
+Afterwards, type `nix flake show` in the root directory of the project to see
+all the available versions of this book. Then type `nix build .#<edition>` to
+build the edition you want (Haskell, Scala, OCaml, Reason and their printed
+versions). For example, to build the Scala edition you'll have to type
+`nix build .#ctfp-scala`.
+
+Upon successful compilation, the PDF file will be placed in the `result`
+directory.
+
+The command `nix develop` will provide a shell containing all the required
+dependencies to build the book manually using the provided `Makefile`. To build
+the `ctfp-scala` edition, just run `make ctfp-scala`.
+
+## Contribute
+
+Contributors are welcome to contribute to this book by sending pull-requests.
+Once reviewed, the changes are merged in the main branch and will be
+incorporated in the next release.
+
+**Note from [Bartosz][bartosz github]**: I really appreciate all your
+contributions. You made this book much better than I could have imagined. Thank
+you!
+
+Find the [list of contributors on Github][contributors].
+
+## Acknowledgements
+
+PDF LaTeX source and the tools to create it are based on the work by [Andres
+Raba][andres raba github]. The book content is taken, with permission, from
+[Bartosz Milewski][bartosz github]'s blogpost series, and adapted to the LaTeX
+format.
+
+The original blog post acknowledgments by Bartosz are consolidated in the
+_Acknowledgments_ page at the end of the book.
+
+## License
+
+The PDF book, `.tex` files, and associated images and figures in directories
+`src/fig` and `src/content` are licensed under [Creative Commons
+Attribution-ShareAlike 4.0 International License][license cc by sa].
+
+The script files `scraper.py` and others are licensed under [GNU General Public
+License version 3][license gnu gpl].
+
+[download badge]:
+  https://img.shields.io/badge/Download-latest-green.svg?style=flat-square
+[github actions link]: https://github.com/hmemcpy/milewski-ctfp-pdf/actions
+[github stars]:
+  https://img.shields.io/github/stars/hmemcpy/milewski-ctfp-pdf.svg?style=flat-square
+[github workflow status]:
+  https://img.shields.io/github/actions/workflow/status/hmemcpy/milewski-ctfp-pdf/build.yml?branch=master&style=flat-square
+[github latest release]:
+  https://github.com/hmemcpy/milewski-ctfp-pdf/releases/latest
+[license badge]:
+  https://img.shields.io/badge/License-CC_By_SA-green.svg?style=flat-square
+[ctfp image]:
+  https://user-images.githubusercontent.com/601206/47271389-8eea0900-d581-11e8-8e81-5b932e336336.png
+[bartosz github]: https://github.com/BartoszMilewski
+[nixos wiki flake]: https://nixos.wiki/wiki/Flakes
+[andres raba github]: https://github.com/sarabander
+[contributors]: https://github.com/hmemcpy/milewski-ctfp-pdf/graphs/contributors
+[license cc by sa]: https://spdx.org/licenses/CC-BY-SA-4.0.html
+[license gnu gpl]: https://spdx.org/licenses/GPL-3.0.html
+[blogpost series]:
+  https://bartoszmilewski.com/2014/10/28/category-theory-for-programmers-the-preface/
+[buy regular edition on blurb]:
+  https://www.blurb.com/b/9621951-category-theory-for-programmers-new-edition-hardco
+[buy scala edition on blurb]:
+  https://www.blurb.com/b/9603882-category-theory-for-programmers-scala-edition-pape
+[v1.3.0 github release link]:
+  https://github.com/hmemcpy/milewski-ctfp-pdf/releases/tag/v1.3.0
+[nix website]: https://nixos.org/nix/
+[nix download website]: https://nixos.org/download.html
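The pure-Nix path from the rewritten README, as a minimal sketch (any package name listed by `nix flake show` can be substituted for `ctfp-scala`):

```bash
nix flake show          # list the available editions
nix build .#ctfp-scala  # build one of them
ls result/              # the PDF ends up in the result/ symlink
```

Note that the old `defaultPackage` output is gone from the new `flake.nix`, so a bare `nix build .` needs an explicit attribute such as `.#ctfp`.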
@@ -1,6 +0,0 @@
let
  rev = "cecfd08d13ddef8a79f277e67b8084bd9afa1586";
  url = "https://github.com/edolstra/flake-compat/archive/${rev}.tar.gz";
  flake = import (fetchTarball url) { src = ./.; };
  inNixShell = builtins.getEnv "IN_NIX_SHELL" != "";
in if inNixShell then flake.shellNix else flake.defaultNix
@@ -2,82 +2,111 @@

 ### Preface

-* [#155](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/155) - Typo (physicist -> physicists)
+- [#155](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/155) - Typo
+  (physicist -> physicists)

 ### 6. Simple Algebraic Data Types

-* [#176](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/176) - Typo (statements -> statement)
+- [#176](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/176) - Typo
+  (statements -> statement)

 ### 8. Functoriality

-* [9a3a5a3](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/9a3a5a386e98ef8f926bccd08f572cc19b1a6367) - added clarifications on bifunctoriality vs. separate functoriality (fix by Bartosz)
+- [9a3a5a3](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/9a3a5a386e98ef8f926bccd08f572cc19b1a6367) -
+  added clarifications on bifunctoriality vs. separate functoriality (fix by
+  Bartosz)

 ### 9. Function Types

-* [#182](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/182) - Fix typo (chose -> choose)
+- [#182](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/182) - Fix typo
+  (chose -> choose)

 ### 10. Natural Transformations

-* [#157](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/157) - Adding paragraph indent
+- [#157](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/157) - Adding
+  paragraph indent

 ### 12. Limits and Colimits

-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical error
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical error

 ### 14. Representable Functors

-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical error
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical error

 ### 18. Adjunctions

-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling of "counit"
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling
+  of "counit"

 ### 19. Free/Forgetful Adjunctions

-* [#156](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/156) - an instance of the category name **Mon** is appearing as **arg**
-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling of "isomorphism"
+- [#156](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/156) - an instance of
+  the category name **Mon** is appearing as **arg**
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling
+  of "isomorphism"

 ### 20. Monads - Programmer's Definition

-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix grammatical error
-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical error
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix
+  grammatical error
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical error

 ### 22. Monads Categorically

-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical error
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical error

 ### 23. Comonads

-* [#158](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/158) - fixed incorrect typesetting of `set`
-* [23f522e](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/23f522ec083c2c98f28f15935ff2893ccd1fa76c) - adjusted `Prod`/`Product` names (fix by Bartosz)
+- [#158](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/158) - fixed
+  incorrect typesetting of `set`
+- [23f522e](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/23f522ec083c2c98f28f15935ff2893ccd1fa76c) -
+  adjusted `Prod`/`Product` names (fix by Bartosz)

 ### 25. Algebras for Monads

-* [#158](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/158) - fixed incorrect typesetting of `set`
-* [#159](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/159) - fixed incorrect typesetting of category terms
-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling of "counit" and "morphisms", fix subscript spacing
-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical errors
+- [#158](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/158) - fixed
+  incorrect typesetting of `set`
+- [#159](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/159) - fixed
+  incorrect typesetting of category terms
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling
+  of "counit" and "morphisms", fix subscript spacing
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical errors

 ### 26. Ends and Coends

-* [#159](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/159) - fixed incorrect typesetting of category terms
-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling of "coequalizer", fix subscript spacing
+- [#159](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/159) - fixed
+  incorrect typesetting of category terms
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling
+  of "coequalizer", fix subscript spacing

 ### 27. Kan Extensions

-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix subscript spacing
-* [31821e5](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/31821e5ded0dacf059e1fcb985be406e8a495107) - postcomposition -> precomposition (fix by Bartosz)
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix subscript
+  spacing
+- [31821e5](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/31821e5ded0dacf059e1fcb985be406e8a495107) -
+  postcomposition -> precomposition (fix by Bartosz)

 ### 28. Enriched Categories

-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix subscript spacing
-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical error
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix subscript
+  spacing
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical error

 ### 29. Topoi

-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical error
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical error

 ### 30. Lawvere Theories

-* [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling of "coequalizer"
-* [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix grammatical errors and a typesetting error
+- [#160](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/160) - Fix spelling
+  of "coequalizer"
+- [#162](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/162) - Fix
+  grammatical errors and a typesetting error
@@ -1,17 +1,27 @@
 ## A list of typos/mistakes that were fixed after the release of the new edition (1.3.0) (12 August, 2019).
-(see errata for the original edition until 1.3.0 [here](https://github.com/hmemcpy/milewski-ctfp-pdf/blob/master/errata-1.0.0.md))
+
+(see errata for the original edition until 1.3.0
+[here](https://github.com/hmemcpy/milewski-ctfp-pdf/blob/master/errata-1.0.0.md))

 ### Preface

-* [#278](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/278) - Fixed reference to Saunders Mac Lane's *Categories for the Working Mathematician*. Was previously misreferenced as "*Category Theory* for the Working Mathematician."
+- [#278](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/278) - Fixed
+  reference to Saunders Mac Lane's _Categories for the Working Mathematician_.
+  Was previously misreferenced as "_Category Theory_ for the Working
+  Mathematician."

 ### 12. Limits and Colimits

-* [#278](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/278) - Fixed formatting of quotation marks around "selecting." Were previously pointing the wrong direction.
+- [#278](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/278) - Fixed
+  formatting of quotation marks around "selecting." Were previously pointing the
+  wrong direction.

 ### 18. Adjunctions

-* [#228](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/228) - Typo (adjuncion -> adjunction)
+- [#228](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/228) - Typo
+  (adjuncion -> adjunction)

 ### 30. Lawvere Theories

-* [#226](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/226) - fix type in diagram of monads as coends
+- [#226](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/226) - fix type in
+  diagram of monads as coends
@@ -2,8 +2,12 @@

 ### 7. Functors

-* [3d29cd9](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/3d29cd99f34ce1205ed9a68aeae038d9d47c7145) - Added `LazyList` example, supported since Scala 2.13
+- [3d29cd9](https://github.com/hmemcpy/milewski-ctfp-pdf/commit/3d29cd99f34ce1205ed9a68aeae038d9d47c7145) -
+  Added `LazyList` example, supported since Scala 2.13

-* [#210](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/210) - Section 6.4 - `prodToSum` snippet. Explicitly Tupling return type to avoid adapted args warning, which is deprecated behavior
+- [#210](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/210) - Section 6.4 -
+  `prodToSum` snippet. Explicitly Tupling return type to avoid adapted args
+  warning, which is deprecated behavior

-* [#243](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/243) - Section 8.7 - Change bimap to dimap in Profunctor definition
+- [#243](https://github.com/hmemcpy/milewski-ctfp-pdf/pull/243) - Section 8.7 -
+  Change bimap to dimap in Profunctor definition
62  flake.lock
@@ -1,40 +1,60 @@
 {
   "nodes": {
+    "flake-parts": {
+      "inputs": {
+        "nixpkgs-lib": "nixpkgs-lib"
+      },
+      "locked": {
+        "lastModified": 1674771137,
+        "narHash": "sha256-Zpk1GbEsYrqKmuIZkx+f+8pU0qcCYJoSUwNz1Zk+R00=",
+        "owner": "hercules-ci",
+        "repo": "flake-parts",
+        "rev": "7c7a8bce3dffe71203dcd4276504d1cb49dfe05f",
+        "type": "github"
+      },
+      "original": {
+        "id": "flake-parts",
+        "type": "indirect"
+      }
+    },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1645379616,
-        "narHash": "sha256-eLzR3MRS9hcNqSWZxP6BP7xiBjgC3/pB5n2Q0lLFe/g=",
+        "lastModified": 1675153841,
+        "narHash": "sha256-EWvU3DLq+4dbJiukfhS7r6sWZyJikgXn6kNl7eHljW8=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "40ef692a55b188b1f5ae3967f3fc7808838c3f1d",
+        "rev": "ea692c2ad1afd6384e171eabef4f0887d2b882d3",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-21.11",
+        "ref": "nixpkgs-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs-lib": {
+      "locked": {
+        "dir": "lib",
+        "lastModified": 1672350804,
+        "narHash": "sha256-jo6zkiCabUBn3ObuKXHGqqORUMH27gYDIFFfLq5P4wg=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "677ed08a50931e38382dbef01cba08a8f7eac8f6",
+        "type": "github"
+      },
+      "original": {
+        "dir": "lib",
+        "owner": "NixOS",
+        "ref": "nixos-unstable",
         "repo": "nixpkgs",
         "type": "github"
       }
     },
     "root": {
       "inputs": {
-        "nixpkgs": "nixpkgs",
-        "utils": "utils"
+        "flake-parts": "flake-parts",
+        "nixpkgs": "nixpkgs"
       }
-    },
-    "utils": {
-      "locked": {
-        "lastModified": 1644229661,
-        "narHash": "sha256-1YdnJAsNy69bpcjuoKdOYQX0YxZBiCYZo4Twxerqv7k=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "3cecb5b042f7f209c56ffd8371b2711a290ec797",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
     }
   },
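The lock now pins `flake-parts` and a `nixpkgs-unstable` revision. When these need to move forward again, the usual commands are as follows (a sketch; both only rewrite `flake.lock`):

```bash
nix flake update                        # refresh every input
nix flake lock --update-input nixpkgs   # or just bump nixpkgs
```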
206  flake.nix
@@ -1,75 +1,124 @@
 {
   description = "Category Theory for Programmers";

-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-21.11";
-  inputs.utils.url = "github:numtide/flake-utils";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";

-  outputs = { self, nixpkgs, utils }: utils.lib.eachDefaultSystem (system: let
+  outputs = inputs @ {
+    self,
+    flake-parts,
+    nixpkgs,
+  }:
+    flake-parts.lib.mkFlake {inherit inputs;} {
+      systems = [
+        "x86_64-linux"
+        "x86_64-darwin"
+        "aarch64-linux"
+        "aarch64-darwin"
+      ];
+
+      perSystem = {
+        config,
+        pkgs,
+        system,
+        ...
+      }: let
         inherit (nixpkgs) lib;

         pkgs = nixpkgs.legacyPackages.${system};

+        ###########################################################################
+        # LaTeX font
+        inconsolata-lgc-latex = pkgs.stdenvNoCC.mkDerivation {
+          name = "inconsolata-lgc-latex";
+          pname = "inconsolata-lgc-latex";
+
+          src = pkgs.inconsolata-lgc;
+
+          dontConfigure = true;
+          sourceRoot = ".";
+
+          installPhase = ''
+            runHook preInstall
+
+            find $src -name '*.ttf' -exec install -m644 -Dt $out/fonts/truetype/public/inconsolata-lgc/ {} \;
+            find $src -name '*.otf' -exec install -m644 -Dt $out/fonts/opentype/public/inconsolata-lgc/ {} \;
+
+            runHook postInstall
+          '';
+
+          tlType = "run";
+        };
+
         ###########################################################################
         # LaTeX Environment
         texliveEnv = pkgs.texlive.combine {
-          inherit (pkgs.texlive)
-            bookcover
-            textpos
-            fgruler
-            tcolorbox
-            fvextra
-            framed
-            newtx
-            nowidow
-            emptypage
-            wrapfig
-            subfigure
+          inherit
+            (pkgs.texlive)
             adjustbox
-            collectbox
-            tikz-cd
-            imakeidx
-            idxlayout
-            titlesec
-            subfiles
-            lettrine
-            upquote
-            libertine
-            mweights
-            fontaxes
-            mdframed
-            needspace
-            xifthen
-            ifnextok
-            currfile
-            noindentafter
-            ifmtarg
-            scheme-medium
-            listings
-            minted
-            microtype
+            alegreya
             babel
-            todonotes
+            bookcover
+            catchfile
             chngcntr
-            ifplatform
-            xstring
-            minifp
-            titlecaps
+            collectbox
+            currfile
+            emptypage
             enumitem
             environ
-            trimspaces
-            l3packages
-            zref
-            catchfile
+            fgruler
+            fontaxes
+            framed
+            fvextra
+            idxlayout
+            ifmtarg
+            ifnextok
+            ifplatform
+            imakeidx
             import
+            inconsolata
+            l3packages
+            lettrine
+            libertine
+            libertinus-fonts
+            listings
+            mdframed
+            microtype
+            minifp
+            minted
+            mweights
+            needspace
+            newtx
+            noindentafter
+            nowidow
+            scheme-medium
+            subfigure
+            subfiles
+            textpos
+            tcolorbox
+            tikz-cd
+            titlecaps
+            titlesec
+            todonotes
+            trimspaces
+            upquote
+            wrapfig
+            xifthen
+            xpatch
+            xstring
+            zref
             ;
+
+          inconsolata-lgc-latex = {
+            pkgs = [inconsolata-lgc-latex];
+          };
         };

         ###########################################################################
         # Python Environment

         # Pin the Python version and its associated package set in a single place.
-        python = pkgs.python38;
-        pythonPkgs = pkgs.python38Packages;
+        python = pkgs.python311;
+        pythonPkgs = pkgs.python311Packages;

         pygments-style-github = pythonPkgs.buildPythonPackage rec {
           pname = "pygments-style-github";
@@ -84,39 +133,41 @@

           # Anything depending on this derivation is probably also gonna want
           # pygments to be available.
-          propagatedBuildInputs = with pythonPkgs; [ pygments ];
+          propagatedBuildInputs = with pythonPkgs; [pygments];
         };

-        pythonEnv = python.withPackages (p: [ p.pygments pygments-style-github ]);
+        pythonEnv = python.withPackages (p: [p.pygments pygments-style-github]);

         commonAttrs = {
-          nativeBuildInputs = [ texliveEnv pythonEnv pkgs.which ];
-          FONTCONFIG_FILE = pkgs.makeFontsConf {
-            fontDirectories = with pkgs; [ inconsolata-lgc libertine libertinus ];
-          };
+          nativeBuildInputs = [texliveEnv pythonEnv pkgs.which];
         };

         mkLatex = variant: edition: let
           maybeVariant = lib.optionalString (variant != null) "-${variant}";
           maybeEdition = lib.optionalString (edition != null) "-${edition}";
-          variantStr = if variant == null then "reader" else variant;
+          variantStr =
+            if variant == null
+            then "reader"
+            else variant;
           suffix = maybeVariant + maybeEdition;
           basename = "ctfp-${variantStr}${maybeEdition}";
           version = self.shortRev or self.lastModifiedDate;
-        in pkgs.stdenv.mkDerivation (commonAttrs // {
+        in
+          pkgs.stdenv.mkDerivation (commonAttrs
+            // {
             inherit basename version;

-            name = "ctfp${suffix}-${version}";
+            name = "ctfp${suffix}";
             fullname = "ctfp${suffix}";
             src = "${self}/src";

             configurePhase = ''
-              echo -n "\\newcommand{\\OPTversion}{$version}" > version.tex
+              substituteInPlace "version.tex" --replace "dev" "${version}"
             '';

             buildPhase = ''
-              latexmk -shell-escape -interaction=nonstopmode -halt-on-error \
-                -norc -jobname=ctfp -pdflatex="xelatex %O %S" -pdf "$basename.tex"
+              latexmk -file-line-error -shell-escape -logfilewarninglist -interaction=nonstopmode -halt-on-error \
+                -norc -jobname=ctfp -pdflatex="xelatex %O %S" -pdfxe "$basename.tex"
             '';

             installPhase = "install -m 0644 -vD ctfp.pdf \"$out/$fullname.pdf\"";
@@ -124,26 +175,29 @@
             passthru.packageName = "ctfp${suffix}";
           });

-        editions = [ null "scala" "ocaml" "reason" ];
-        variants = [ null "print" ];
-      in {
-        # nix build .#ctfp
-        # nix build .#ctfp-print
-        # nix build .#ctfp-print-ocaml
-        # etc etc
-        packages = lib.listToAttrs (lib.concatMap (variant: map (edition: rec {
+        editions = [null "scala" "ocaml" "reason"];
+        variants = [null "print"];
+      in rec {
+        formatter = pkgs.alejandra;
+
+        packages = lib.listToAttrs (lib.concatMap (variant:
+          map (edition: rec {
             name = value.packageName;
             value = mkLatex variant edition;
-          }) editions) variants);
-
-        # nix build .
-        defaultPackage = self.packages.${system}.ctfp;
+          })
+          editions)
+        variants);

         # nix develop .
-        devShell = pkgs.mkShell (commonAttrs // {
-          nativeBuildInputs = commonAttrs.nativeBuildInputs ++ [
-            pkgs.git pkgs.gnumake
+        devShells.default = pkgs.mkShellNoCC (commonAttrs
+          // {
+            nativeBuildInputs =
+              commonAttrs.nativeBuildInputs
+              ++ [
+                pkgs.git
+                pkgs.gnumake
             ];
         });
-  });
+      };
+  };
 }
12  shell.nix  (deleted)
@@ -1,12 +0,0 @@
(import
  (
    let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in
    fetchTarball {
      url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
      sha256 = lock.nodes.flake-compat.locked.narHash;
    }
  )
  {
    src = ./.;
  }
).shellNix
17  src/.gitignore  vendored  (deleted)
@@ -1,17 +0,0 @@
*.aux
*.cp*
*.fn
*.ky
*.log
*.pg
*.toc
*.tp
*.vr
*.vim
*.idx
*.ilg
*.ind
*.out
*.swp
*~
ctfp.fdb_latexmk
85  src/Makefile  (deleted)
@@ -1,85 +0,0 @@
# Igal Tabachnik, 2007.
# Based on work by Andres Raba et al., 2013-2015.

.PHONY: default all clean out-dir version.tex scala ocaml reason

DIR := $(shell pwd)
GIT_VER := $(shell git describe --tags --always --long | tr -d '\n')
OUTPUT_DIR := ../out

OUTPUT = category-theory-for-programmers

# Default top-level LaTeX to generate
DEFAULTTOPTEX = ctfp-reader.tex ctfp-print.tex

SCALATEXFILES = ctfp-reader-scala.tex ctfp-print-scala.tex # todo make this a macro

OCAMLTEXFILES = ctfp-reader-ocaml.tex ctfp-print-ocaml.tex # todo make this a macro

REASONTEXFILES = ctfp-reader-reason.tex ctfp-print-reason.tex # todo make this a macro

# Top-level LaTeX files from which CTFP book can be generated
TOPTEXFILES = version.tex $(DEFAULTTOPTEX) $(SCALATEXFILES) $(OCAMLTEXFILES) $(REASONTEXFILES)

# Default PDF file to make
DEFAULTPDF:=$(DEFAULTTOPTEX:.tex=.pdf)

# Scala PDF file to make
SCALAPDF:=$(SCALATEXFILES:.tex=.pdf)

# OCaml PDF file to make
OCAMLPDF:=$(OCAMLTEXFILES:.tex=.pdf)

# ReasonML PDF file to make
REASONPDF:=$(REASONTEXFILES:.tex=.pdf)

# Other PDF files for the CTFP book
TOPPDFFILES:=$(TOPTEXFILES:.tex=.pdf)

# Configuration files
OPTFILES = opt-print-ustrade.tex \
	opt-reader-10in.tex \
	opt-scala.tex

# All the LaTeX files for the CTFP book in order of dependency
TEXFILES = $(TOPTEXFILES) $(SCALATEXFILES) $(OCAMLTEXFILES) $(REASONTEXFILES) $(OPTFILES)

default: suffix=''
default: out-dir $(DEFAULTPDF) # todo cover

all: clean default scala ocaml reason

scala: suffix='-scala'
scala: clean out-dir version.tex $(SCALAPDF)

ocaml: suffix='-ocaml'
ocaml: clean out-dir version.tex $(OCAMLPDF)

reason: suffix='-reason'
reason: clean out-dir version.tex $(REASONPDF)

# Main targets
$(TOPPDFFILES) : %.pdf : %.tex $(TEXFILES)
	if which latexmk > /dev/null 2>&1 ;\
	then \
		latexmk -shell-escape -interaction=nonstopmode -halt-on-error -norc -jobname=ctfp -pdflatex="xelatex %O %S" -pdf $< ;\
		mv ctfp.pdf $(OUTPUT_DIR)/$(subst ctfp,$(OUTPUT),$(subst ctfp-reader,$(OUTPUT),$*)).pdf ;\
	else @printf "Error: unable to find latexmk. Is it installed?\n" ;\
	fi

version.tex:
	@printf '\\newcommand{\\OPTversion}{' > version.tex
	@printf $(GIT_VER) >> version.tex
	@printf '}' >> version.tex

out-dir:
	@printf 'Creating output directory: $(OUTPUT_DIR)\n'
	mkdir -p $(OUTPUT_DIR)

clean:
	rm -f *~ *.aux {ctfp-*}.{out,log,pdf,dvi,fls,fdb_latexmk,aux,brf,bbl,idx,ilg,ind,toc,sed}
	if which latexmk > /dev/null 2>&1 ; then latexmk -CA; fi
	rm -rf ../out

clean-minted:
	rm -rf _minted-*
@ -1,11 +1,11 @@
\newcommand{\cat}{%
\symbf%
}
\newcommand{\idarrow}[1][]{%
\mathbf{id}_{#1}%
}
\newcommand{\Lim}[1][]{%
\mathbf{Lim}{#1}%
}
\newcommand{\Set}{\cat{Set}}
\newcommand{\Rel}{\cat{Rel}}
@ -11,5 +11,5 @@ Original book layout design and typography are done by Andres Raba. Syntax highl
\urlref{https://github.com/hugomaiavieira/pygments-style-github}{Hugo Maia Vieira}.
\ifdefined\OPTCustomLanguage{%
\input{content/\OPTCustomLanguage/colophon}
}
\fi
@ -1,20 +1,20 @@
% !TEX root = ../../ctfp-print.tex

\begin{quote}
For some time now I've been floating the idea of writing a book about
category theory that would be targeted at programmers. Mind you, not
computer scientists but programmers --- engineers rather than
scientists. I know this sounds crazy and I am properly scared. I can't
deny that there is a huge gap between science and engineering because I
have worked on both sides of the divide. But I've always felt a very
strong compulsion to explain things. I have tremendous admiration for
Richard Feynman who was the master of simple explanations. I know I'm no
Feynman, but I will try my best. I'm starting by publishing this preface
--- which is supposed to motivate the reader to learn category theory
--- in hopes of starting a discussion and soliciting feedback.\footnote{
You may also watch me teach this material to a live audience, at
\href{https://goo.gl/GT2UWU}{https://goo.gl/GT2UWU} (or search
``bartosz milewski category theory'' on YouTube.)}
\end{quote}

\lettrine[lhang=0.17]{I}{will attempt}, in the space of a few paragraphs,
@ -64,7 +64,7 @@ Of course when using hand-waving arguments you run the risk of saying
something blatantly wrong, so I will try to make sure that there is
solid mathematical theory behind informal arguments in this book. I do
have a worn-out copy of Saunders Mac Lane's \emph{Categories for
the Working Mathematician} on my nightstand.

Since this is category theory \emph{for programmers} I will illustrate
all major concepts using computer code. You are probably aware that
@ -102,8 +102,8 @@ the position of a frog that must decide if it should continue swimming
in increasingly hot water, or start looking for some alternatives.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/img_1299.jpg}
\end{figure}

\noindent
@ -136,7 +136,7 @@ us to rethink the foundations of programming. Just like the builders of
Europe's great gothic cathedrals we've been honing our craft to the
limits of material and structure. There is an unfinished gothic
\urlref{http://en.wikipedia.org/wiki/Beauvais_Cathedral}{cathedral in
Beauvais}, France, that stands witness to this deeply human struggle
with limitations. It was intended to beat all previous records of height
and lightness, but it suffered a series of collapses. Ad hoc measures
like iron rods and wooden supports keep it from disintegrating, but
@ -151,7 +151,7 @@ all this based on very flimsy theoretical foundations. We have to fix
those foundations if we want to move forward.

\begin{figure}
\centering
\includegraphics[totalheight=0.5\textheight]{images/beauvais_interior_supports.jpg}
\caption{Ad hoc measures preventing the Beauvais cathedral from collapsing.}
\end{figure}
@ -12,11 +12,11 @@ object $B$ to object $C$, then there must be an arrow --- their composition
--- that goes from $A$ to $C$.

\begin{figure}
\centering
\includegraphics[width=0.8\textwidth]{images/img_1330.jpg}
\caption{In a category, if there is an arrow going from $A$ to $B$ and an arrow going from $B$ to $C$
then there must also be a direct arrow from $A$ to $C$ that is their composition. This diagram is not a full
category because it’s missing identity morphisms (see later).}
\end{figure}

\section{Arrows as Functions}
@ -100,30 +100,30 @@ There are two extremely important properties that the composition in any
category must satisfy.

\begin{enumerate}
\item
Composition is associative. If you have three morphisms, $f$, $g$, and $h$,
that can be composed (that is, their objects match end-to-end), you
don't need parentheses to compose them. In math notation this is
expressed as:
\[h \circ (g \circ f) = (h \circ g) \circ f = h \circ g \circ f\]
In (pseudo) Haskell:

\src{snippet04}[b]
(I said ``pseudo,'' because equality is not defined for functions.)

Associativity is pretty obvious when dealing with functions, but it may
be not as obvious in other categories.

\item
For every object $A$ there is an arrow which is a unit of composition.
This arrow loops from the object to itself. Being a unit of composition
means that, when composed with any arrow that either starts at $A$ or ends
at $A$, respectively, it gives back the same arrow. The unit arrow for
object A is called $\idarrow[A]$ (\newterm{identity} on $A$). In math
notation, if $f$ goes from $A$ to $B$ then
\[f \circ \idarrow[A] = f\]
and
\[\idarrow[B] \circ f = f\]
\end{enumerate}
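
Both laws can also be sketched directly in Haskell. The functions \code{f}, \code{g}, \code{h} and the sample type below are arbitrary placeholders, and the equations are checked pointwise, since functions themselves cannot be compared for equality:

\begin{snip}{haskell}
f, g, h :: Int -> Int
f = (+ 1)
g = (* 2)
h = subtract 3

-- Associativity, checked pointwise:
assocHolds :: Int -> Bool
assocHolds x = (h . (g . f)) x == ((h . g) . f) x

-- The two identity laws, checked pointwise:
identityHolds :: Int -> Bool
identityHolds x = (f . id) x == f x && (id . f) x == f x
\end{snip}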
When dealing with functions, the identity arrow is implemented as the
identity function that just returns back its argument. The
@ -209,7 +209,7 @@ imposed on us by computers. It reflects the limitations of the human
mind. Our brains can only deal with a small number of concepts at a
time. One of the most cited papers in psychology,
\urlref{http://en.wikipedia.org/wiki/The_Magical_Number_Seven,_Plus_or_Minus_Two}{The
Magical Number Seven, Plus or Minus Two}, postulated that we can only
keep $7 \pm 2$ ``chunks'' of information in our minds. The details of our
understanding of the human short-term memory might be changing, but we
know for sure that it's limited. The bottom line is that we are unable
@ -251,23 +251,23 @@ advantages of your programming paradigm.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Implement, as best as you can, the identity function in your favorite
language (or the second favorite, if your favorite language happens to
be Haskell).
\item
Implement the composition function in your favorite language. It takes
two functions as arguments and returns a function that is their
composition.
\item
Write a program that tries to test that your composition function
respects identity.
\item
Is the world-wide web a category in any sense? Are links morphisms?
\item
Is Facebook a category, with people as objects and friendships as
morphisms?
\item
When is a directed graph a category?
\end{enumerate}
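
One possible Haskell sketch of the first three challenges (the names \code{myId} and \code{myCompose} and the sample inputs are arbitrary choices; the standard library already provides \code{id} and \code{(.)}):

\begin{snip}{haskell}
myId :: a -> a
myId x = x

myCompose :: (b -> c) -> (a -> b) -> (a -> c)
myCompose g f = \x -> g (f x)

-- Challenge 3: composition respects identity, checked on sample inputs.
respectsIdentity :: Bool
respectsIdentity = all check [-10 .. 10 :: Int]
  where
    f = (* 3)
    check x = myCompose f myId x == f x && myCompose myId f x == f x
\end{snip}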
@ -10,7 +10,7 @@ category inside another. The source category serves as a model, a
blueprint, for some structure that's part of the target category.

\begin{figure}[H]
\centering\includegraphics[width=0.4\textwidth]{images/1_functors.jpg}
\end{figure}

\noindent
@ -29,8 +29,8 @@ A mapping of functors should therefore map $F a$ to
$G a$.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/2_natcomp.jpg}
\end{figure}

\noindent
@ -64,26 +64,26 @@ and $b$ in $\cat{C}$. It's mapped to two morphisms, $F\ f$
and $G f$ in $\cat{D}$:

\begin{gather*}
F f \Colon F a \to F b \\
G f \Colon G a \to G b
\end{gather*}
The natural transformation $\alpha$ provides two additional morphisms
that complete the diagram in \emph{D}:

\begin{gather*}
\alpha_a \Colon F a \to G a \\
\alpha_b \Colon F b \to G b
\end{gather*}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/3_naturality.jpg}
\end{figure}

\noindent
Now we have two ways of getting from $F a$ to $G b$. To
make sure that they are equal, we must impose the \newterm{naturality
condition} that holds for any $f$:

\[G f \circ \alpha_a = \alpha_b \circ F f\]
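
For a concrete Haskell sketch, take the polymorphic function \code{safeHead}, viewed as a transformation from the list functor to \code{Maybe}; the naturality condition then becomes an equation between ordinary functions, checked here pointwise for one choice of \code{f}:

\begin{snip}{haskell}
safeHead :: [a] -> Maybe a
safeHead []      = Nothing
safeHead (x : _) = Just x

-- Naturality:  fmap f . safeHead  ==  safeHead . fmap f
naturalityHolds :: [Int] -> Bool
naturalityHolds xs = (fmap show . safeHead) xs == (safeHead . fmap show) xs
\end{snip}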
The naturality condition is a pretty stringent requirement. For
@ -94,8 +94,8 @@ $\alpha_a$ along $f$:
\[\alpha_b = (G f) \circ \alpha_a \circ (F f)^{-1}\]

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/4_transport.jpg}
\end{figure}

\noindent
@ -114,8 +114,8 @@ also say that it maps morphisms to commuting squares --- there is one
commuting naturality square in $\cat{D}$ for every morphism in $\cat{C}$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/naturality.jpg}
\end{figure}

\noindent
@ -215,9 +215,9 @@ limitations on the implementation --- one formula for all types. These
limitations translate into equational theorems about such functions. In
the case of functions that transform functors, free theorems are the
naturality conditions.\footnote{
You may read more about free theorems in my
blog \href{https://bartoszmilewski.com/2014/09/22/parametricity-money-for-nothing-and-theorems-for-free/}{``Parametricity:
Money for Nothing and Theorems for Free}.''}

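For instance, \code{reverse :: [a] -> [a]} can be read as a natural transformation from the list functor to itself, and its free theorem is exactly such a naturality condition; a quick pointwise check for one choice of function:

\begin{snip}{haskell}
-- Free theorem / naturality:  fmap f . reverse  ==  reverse . fmap f
freeTheoremReverse :: [Int] -> Bool
freeTheoremReverse xs = (map (+ 1) . reverse) xs == (reverse . map (+ 1)) xs
\end{snip}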
One way of thinking about functors in Haskell that I mentioned earlier
is to consider them generalized containers. We can continue this analogy
@ -405,8 +405,8 @@ $\beta \cdot \alpha$ --- the composition of two natural transformations $\beta$
\[(\beta \cdot \alpha)_a = \beta_a \circ \alpha_a\]

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/5_vertical.jpg}
\end{figure}

\noindent
@ -415,8 +415,8 @@ composition is indeed a natural transformation from F to H:
\[H f \circ (\beta \cdot \alpha)_a = (\beta \cdot \alpha)_b \circ F f\]

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/6_verticalnaturality.jpg}
\end{figure}

\noindent
@ -438,8 +438,8 @@ composition is important in defining the functor category. I'll explain
horizontal composition shortly.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/6a_vertical.jpg}
\end{figure}

\noindent
@ -458,8 +458,8 @@ functors. A Hom-set in $\Cat$ is a set of functors. For instance
$\cat{Cat(C, D)}$ is a set of functors between two categories $\cat{C}$ and $\cat{D}$.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/7_cathomset.jpg}
\end{figure}

\noindent
@ -506,18 +506,18 @@ morphisms between morphisms.
In the case of $\Cat$ seen as a $\cat{2}$-category we have:

\begin{itemize}
\tightlist
\item
Objects: (Small) categories
\item
1-morphisms: Functors between categories
\item
2-morphisms: Natural transformations between functors.
\end{itemize}

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/8_cat-2-cat.jpg}
\end{figure}

\noindent
@ -534,21 +534,21 @@ do they interact with each other?

Let's pick two functors, or 1-morphisms, in $\Cat$:
\begin{gather*}
F \Colon \cat{C} \to \cat{D} \\
G \Colon \cat{D} \to \cat{E}
\end{gather*}
and their composition:
\[G \circ F \Colon \cat{C} \to \cat{E}\]
Suppose we have two natural transformations, $\alpha$ and $\beta$, that act,
respectively, on functors $F$ and $G$:
\begin{gather*}
\alpha \Colon F \to F' \\
\beta \Colon G \to G'
\end{gather*}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/10_horizontal.jpg}
\end{figure}

\noindent
@ -563,8 +563,8 @@ Having $\alpha$ and $\beta$ at our disposal, can we define a natural transformat
from $G \circ F$ to $G' \circ F'$? Let me sketch the construction.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/9_horizontal.jpg}
\end{figure}

\noindent
@ -577,14 +577,14 @@ objects: $G (F a)$, $G'(F a)$, $G (F'a)$, $G'(F'a)$.
We also have four morphisms forming a square. Two of these morphisms are
the components of the natural transformation $\beta$:
\begin{gather*}
\beta_{F a} \Colon G (F a) \to G'(F a) \\
\beta_{F'a} \Colon G (F'a) \to G'(F'a)
\end{gather*}
The other two are the images of $\alpha_a$ under the two
functors (functors map morphisms):
\begin{gather*}
G \alpha_a \Colon G (F a) \to G (F'a) \\
G'\alpha_a \Colon G'(F a) \to G'(F'a)
\end{gather*}
That's a lot of morphisms. Our goal is to find a morphism that goes from
$G (F a)$ to $G'(F'a)$, a candidate for the
@ -592,8 +592,8 @@ component of a natural transformation connecting the two functors $G \circ F$
and $G' \circ F'$. In fact there's not one but two paths we can take from
$G (F a)$ to $G'(F'a)$:
\begin{gather*}
G'\alpha_a \circ \beta_{F a} \\
\beta_{F'a} \circ G \alpha_a
\end{gather*}
Luckily for us, they are equal, because the square we have formed turns
out to be the naturality square for $\beta$.
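
This construction can be sketched in Haskell (assuming the \code{RankNTypes} extension; the name \code{horiz} is an arbitrary choice), with the two equal paths showing up as the two equivalent definitions:

\begin{snip}{haskell}
{-# LANGUAGE RankNTypes #-}

-- Component of the horizontal composite at a: G (F a) -> G' (F' a)
horiz :: (Functor g, Functor g')
      => (forall x. g x -> g' x)   -- beta
      -> (forall x. f x -> f' x)   -- alpha
      -> g (f a) -> g' (f' a)
horiz beta alpha = fmap alpha . beta   -- equivalently: beta . fmap alpha
\end{snip}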
@ -620,8 +620,8 @@ categories, the ones that are connected by the functors it transforms.
We can think of it as connecting these two categories.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/sideways.jpg}
\end{figure}

\noindent
@ -689,36 +689,36 @@ natural transformation is a special type of polymorphic function.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Define a natural transformation from the \code{Maybe} functor to the
list functor. Prove the naturality condition for it.
\item
Define at least two different natural transformations between
\code{Reader ()} and the list functor. How many different lists of
\code{()} are there?
\item
Continue the previous exercise with \code{Reader Bool} and
\code{Maybe}.
\item
Show that horizontal composition of natural transformations satisfies
the naturality condition (hint: use components). It's a good exercise
in diagram chasing.
\item
Write a short essay about how you may enjoy writing down the evident
diagrams needed to prove the interchange law.
\item
Create a few test cases for the opposite naturality condition of
transformations between different \code{Op} functors. Here's one
choice:

\begin{snip}{haskell}
op :: Op Bool Int
op = Op (\x -> x > 0)
\end{snip}
and

\begin{snip}{haskell}
f :: String -> Int
f x = read x
\end{snip}
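
For the first challenge, one candidate transformation is sketched below (it coincides with \code{maybeToList} from \code{Data.Maybe}); naturality follows by checking the \code{Nothing} and \code{Just} cases:

\begin{snip}{haskell}
maybeToList :: Maybe a -> [a]
maybeToList Nothing  = []
maybeToList (Just x) = [x]

-- Naturality: fmap f . maybeToList == maybeToList . fmap f
--   Nothing case: fmap f []  == []    == maybeToList Nothing
--   Just x case:  fmap f [x] == [f x] == maybeToList (Just (f x))
\end{snip}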
@ -12,8 +12,8 @@ happily hitting random keys, producing programs, compiling, and running
them.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/img_1329.jpg}
\end{figure}

\noindent
@ -71,7 +71,7 @@ optional. Programmers tend to use them anyway, because they can tell a
lot about the semantics of code, and they make compilation errors easier
to understand. It's a common practice in Haskell to start a project by
designing the types. \sloppy{Later, type annotations drive the implementation
and become compiler-enforced comments.}

Strong static typing is often used as an excuse for not testing the
code. You may sometimes hear Haskell programmers saying, ``If it
@ -178,8 +178,8 @@ complications, so at this point I will use my butcher's knife and
terminate this line of reasoning. From the pragmatic point of view, it's
okay to ignore non-terminating functions and bottoms, and treat
$\Hask$ as bona fide $\Set$.\footnote{Nils Anders Danielsson,
John Hughes, Patrik Jansson, Jeremy Gibbons, \href{http://www.cs.ox.ac.uk/jeremy.gibbons/publications/fast+loose.pdf}{
Fast and Loose Reasoning is Morally Correct}. This paper provides justification for ignoring bottoms in most contexts.}

\section{Why Do We Need a Mathematical Model?}

@ -292,7 +292,7 @@ cannot be easily modelled as a mathematical function.

In programming languages, functions that always produce the same result
given the same input and have no side effects are called \newterm{pure
functions}. In a pure functional language like Haskell all functions are
pure. Because of that, it's easier to give these languages denotational
semantics and model them using category theory. As for other languages,
it's always possible to restrict yourself to a pure subset, or reason
@ -433,8 +433,8 @@ have the form \code{ctype::is(alpha, c)}, \code{ctype::is(digit, c)}, etc.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Define a higher-order function (or a function object) \code{memoize}
in your favorite language. This function takes a pure function
\code{f} as an argument and returns a function that behaves almost
@ -446,15 +446,15 @@ have the form \code{ctype::is(alpha, c)}, \code{ctype::is(digit, c)}, etc.
function that takes a long time to evaluate. You'll have to wait for
the result the first time you call it, but on subsequent calls, with
the same argument, you should get the result immediately.
\item
Try to memoize a function from your standard library that you normally
use to produce random numbers. Does it work?
\item
Most random number generators can be initialized with a seed.
Implement a function that takes a seed, calls the random number
generator with that seed, and returns the result. Memoize that
function. Does it work?
\item
Which of these C++ functions are pure? Try to memoize them and observe
what happens when you call them multiple times: memoized and not.

@ -463,18 +463,18 @@ have the form \code{ctype::is(alpha, c)}, \code{ctype::is(digit, c)}, etc.
\item
The factorial function from the example in the text.
\item
\begin{minted}{cpp}
std::getchar()
\end{minted}
\item
\begin{minted}{cpp}
bool f() {
std::cout << "Hello!" << std::endl;
return true;
}
\end{minted}
\item
\begin{minted}{cpp}
int f(int x) {
static int y = 0;
y += x;
@ -482,10 +482,10 @@ int f(int x) {
}
\end{minted}
\end{enumerate}
\item
How many different functions are there from \code{Bool} to
\code{Bool}? Can you implement them all?
\item
Draw a picture of a category whose only objects are the types
\code{Void}, \code{()} (unit), and \code{Bool}; with arrows
corresponding to all possible functions between these types. Label the
@ -95,7 +95,7 @@ The neutral element is zero, because:
and
\[a + 0 = a\]
The second equation is redundant, because addition is commutative $(a + b
= b + a)$, but commutativity is not part of the definition of a monoid.
For instance, string concatenation is not commutative and yet it forms a
monoid. The neutral element for string concatenation, by the way, is an
empty string, which can be attached to either side of a string without
@ -260,8 +260,8 @@ monoid can be described as a single object category with a set of
morphisms that follow appropriate rules of composition.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/monoid.jpg}
\end{figure}

\noindent
@ -289,9 +289,9 @@ this product. So we can always recover a set monoid from a category
monoid. For all intents and purposes they are one and the same.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/monoidhomset.jpg}
\caption{Monoid hom-set seen as morphisms and as points in a set.}
\end{figure}

\noindent
@ -310,8 +310,8 @@ set $\cat{M}(m, m)$.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Generate a free category from:

\begin{enumerate}
@ -327,7 +327,7 @@ set $\cat{M}(m, m)$.
A graph with a single node and 26 arrows marked with the letters of
the alphabet: a, b, c \ldots{} z.
\end{enumerate}
\item
What kind of order is this?

\begin{enumerate}
@ -340,13 +340,13 @@ set $\cat{M}(m, m)$.
\code{T2} if a pointer to \code{T1} can be passed to a function that expects a
pointer to \code{T2} without triggering a compilation error.
\end{enumerate}
\item
Considering that \code{Bool} is a set of two values \code{True} and \code{False}, show that
it forms two (set-theoretical) monoids with respect to, respectively,
operator \code{\&\&} (AND) and \code{||} (OR).
\item
Represent the \code{Bool} monoid with the AND operator as a category: List
the morphisms and their rules of composition.
\item
Represent addition modulo 3 as a monoid category.
\end{enumerate}
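
A Haskell sketch of the two \code{Bool} monoids from these challenges (mirroring the standard \code{All} and \code{Any} wrappers; the names below are arbitrary):

\begin{snip}{haskell}
newtype AndBool = AndBool Bool deriving (Eq, Show)
newtype OrBool  = OrBool  Bool deriving (Eq, Show)

instance Semigroup AndBool where
  AndBool a <> AndBool b = AndBool (a && b)
instance Monoid AndBool where
  mempty = AndBool True    -- neutral element for (&&)

instance Semigroup OrBool where
  OrBool a <> OrBool b = OrBool (a || b)
instance Monoid OrBool where
  mempty = OrBool False    -- neutral element for (||)
\end{snip}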
@ -110,8 +110,8 @@ that they piggyback a message string on top of their regular return
values.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/piggyback.jpg}
\end{figure}
\noindent
We will ``embellish'' the return values of these functions. Let's do it
@ -156,7 +156,7 @@ Now imagine a whole program written in this style. It's a nightmare of
repetitive, error-prone code. But we are programmers. We know how to
deal with repetitive code: we abstract it! This is, however, not your
run of the mill abstraction --- we have to abstract \newterm{function
composition} itself. But composition is the essence of category theory,
so before we write more code, let's analyze the problem from the
categorical point of view.

@ -205,16 +205,16 @@ So here's the recipe for the composition of two morphisms in this new
category we are constructing:

\begin{enumerate}
\tightlist
\item
Execute the embellished function corresponding to the first morphism
\item
Extract the first component of the result pair and pass it to the
embellished function corresponding to the second morphism
\item
Concatenate the second component (the string) of the first result
and the second component (the string) of the second result
\item
Return a new pair combining the first component of the final result
with the concatenated string.
\end{enumerate}
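
This recipe translates almost word for word into a Haskell sketch (the names \code{Embellished} and \code{composeEmbellished} are placeholders):

\begin{snip}{haskell}
type Embellished a b = a -> (b, String)

composeEmbellished :: Embellished a b -> Embellished b c -> Embellished a c
composeEmbellished m1 m2 = \x ->
  let (y, s1) = m1 x   -- 1. run the first embellished function
      (z, s2) = m2 y   -- 2. pass its first component to the second one
  in (z, s1 ++ s2)     -- 3. and 4. concatenate the strings, repackage
\end{snip}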
@ -431,15 +431,15 @@ optional<double> safe_root(double x) {
Here's the challenge:

\begin{enumerate}
\tightlist
\item
Construct the Kleisli category for partial functions (define
composition and identity).
\item
Implement the embellished function \code{safe\_reciprocal} that
returns a valid reciprocal of its argument, if it's different from
zero.
\item
Compose the functions \code{safe\_root} and \code{safe\_reciprocal} to implement
\code{safe\_root\_reciprocal} that calculates \code{sqrt(1/x)}
whenever possible.
@ -45,13 +45,13 @@ or equal-to another object. Which leads us to this definition of the
initial object:

\begin{quote}
The \textbf{initial object} is the object that has one and only one
morphism going to any object in the category.
\end{quote}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/initial.jpg}
\end{figure}

\noindent
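
In Haskell the initial object is (morally) the uninhabited type \code{Void}; a minimal sketch of the unique morphism out of it, using \code{Data.Void} from the standard libraries:

\begin{snip}{haskell}
import Data.Void (Void, absurd)

-- The unique arrow from the initial object to any object a;
-- it can never actually be applied, because Void has no values.
toAnything :: Void -> a
toAnything = absurd
\end{snip}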
@ -87,13 +87,13 @@ object that's more terminal than any other object in the category.
Again, we will insist on uniqueness:

\begin{quote}
The \textbf{terminal object} is the object with one and only one
morphism coming to it from any object in the category.
\end{quote}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/final.jpg}
\end{figure}

\noindent
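
Dually, a minimal Haskell sketch of the terminal object: the unit type \code{()} receives exactly one (total) morphism from every type, the function that discards its argument:

\begin{snip}{haskell}
-- The unique arrow from any object a to the terminal object ()
unit :: a -> ()
unit _ = ()
\end{snip}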
@ -192,9 +192,9 @@ $i_{2}$ to $i_{1}$. What's the composition of
these two morphisms?

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/uniqueness.jpg}
\caption{All morphisms in this diagram are unique.}
\end{figure}

\noindent
@ -259,8 +259,8 @@ respectively:
\src{snippet09}

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/productpattern.jpg}
\end{figure}

\noindent
@ -268,8 +268,8 @@ All $c$s that fit this pattern will be considered candidates for
the product. There may be lots of them.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/productcandidates.jpg}
\end{figure}

\noindent
@ -307,8 +307,8 @@ $p'$ and $q'$ can be reconstructed from $p$ and $q$ using $m$:
\src{snippet12}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/productranking.jpg}
\end{figure}

\noindent
@ -323,8 +323,8 @@ and \code{snd} is indeed \emph{better} than the two candidates I
presented before.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/not-a-product.jpg}
\end{figure}

\noindent
@ -375,10 +375,10 @@ category using the same universal construction. Such a product doesn't
always exist, but when it does, it is unique up to a unique isomorphism.

\begin{quote}
A \textbf{product} of two objects $a$ and $b$ is the object
$c$ equipped with two projections such that for any other object
$c'$ equipped with two projections there is a unique morphism
$m$ from $c'$ to $c$ that factorizes those projections.
\end{quote}

\noindent
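
A Haskell sketch of that unique morphism \code{m}: given any candidate object \code{c} with projections \code{p} and \code{q}, the mediating arrow into the pair type is fully determined by them:

\begin{snip}{haskell}
factorizer :: (c -> a) -> (c -> b) -> (c -> (a, b))
factorizer p q = \x -> (p x, q x)
\end{snip}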
@ -399,8 +399,8 @@ and $b$ to $c$.
\src{snippet21}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/coproductpattern.jpg}
\end{figure}

\noindent
@ -412,8 +412,8 @@ factorizes the injections:
\src{snippet22}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/coproductranking.jpg}
\end{figure}

\noindent
@ -422,10 +422,10 @@ any other pattern, is called a coproduct and, if it exists, is unique up
to unique isomorphism.

\begin{quote}
A \textbf{coproduct} of two objects $a$ and $b$ is the object
$c$ equipped with two injections such that for any other object
$c'$ equipped with two injections there is a unique morphism
$m$ from $c$ to $c'$ that factorizes those injections.
\end{quote}

\noindent
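
Dually, a sketch of the mediating morphism out of \code{Either} (the name \code{cofactorizer} is arbitrary; the function coincides with the standard \code{either}):

\begin{snip}{haskell}
cofactorizer :: (a -> c) -> (b -> c) -> Either a b -> c
cofactorizer i j (Left a)  = i a
cofactorizer i j (Right b) = j b
\end{snip}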
@ -588,41 +588,41 @@ isomorphism is the same as a bijection.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Show that the terminal object is unique up to unique isomorphism.
\item
What is a product of two objects in a poset? Hint: Use the universal
construction.
\item
What is a coproduct of two objects in a poset?
\item
Implement the equivalent of Haskell \code{Either} as a generic type
in your favorite language (other than Haskell).
\item
Show that \code{Either} is a ``better'' coproduct than \code{int}
equipped with two injections:

\begin{snip}{cpp}
int i(int n) { return n; }
int j(bool b) { return b ? 0: 1; }
\end{snip}

Hint: Define a function

\begin{snip}{cpp}
int m(Either const & e);
\end{snip}

that factorizes \code{i} and \code{j}.
\item
Continuing the previous problem: How would you argue that \code{int}
with the two injections \code{i} and \code{j} cannot be ``better''
than \code{Either}?
\item
Still continuing: What about these injections?

\begin{snip}{cpp}
int i(int n) {
if (n < 0) return n;
return n + 2;
@ -630,7 +630,7 @@ int i(int n) {

int j(bool b) { return b ? 0: 1; }
\end{snip}
\item
Come up with an inferior candidate for a coproduct of \code{int} and
\code{bool} that cannot be better than \code{Either} because it
allows multiple acceptable morphisms from it to \code{Either}.
@ -639,8 +639,8 @@ int j(bool b) { return b ? 0: 1; }
\section{Bibliography}

\begin{enumerate}
\tightlist
\item
The Catsters,
\urlref{https://www.youtube.com/watch?v=upCSDIO9pjc}{Products and
Coproducts} video.
@ -22,8 +22,8 @@ in C++ it's a relatively complex template defined in the Standard
Library.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/pair.jpg}
\end{figure}

\noindent
@ -384,17 +384,17 @@ statements about, say, natural numbers, which form a rig, to statements
about types. Here's a translation table with some entries of interest:

\begin{longtable}[]{@{}ll@{}}
\toprule
Numbers & Types\tabularnewline
\midrule
\endhead
$0$ & \code{Void}\tabularnewline
$1$ & \code{()}\tabularnewline
$a + b$ & \code{Either a b = Left a | Right b}\tabularnewline
$a \times b$ & \code{(a, b)} or \code{Pair a b = Pair a b}\tabularnewline
$2 = 1 + 1$ & \code{data Bool = True | False}\tabularnewline
$1 + a$ & \code{data Maybe = Nothing | Just a}\tabularnewline
\bottomrule
\end{longtable}

\noindent
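
As one concrete instance of this table, the row $2 = 1 + 1$ can be witnessed by an isomorphism sketched in Haskell:

\begin{snip}{haskell}
boolToSum :: Bool -> Either () ()
boolToSum False = Left ()
boolToSum True  = Right ()

sumToBool :: Either () () -> Bool
sumToBool (Left ())  = False
sumToBool (Right ()) = True

-- Both round trips are the identity, which can be verified by
-- enumerating the two values on each side.
\end{snip}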
@@ -407,7 +407,7 @@ If we do our usual substitutions, and also replace \code{List a} with
\code{x}, we get the equation:

\begin{Verbatim}
x = 1 + a * x
\end{Verbatim}
We can't solve it using traditional algebraic methods because we can't
subtract or divide types. But we can try a series of substitutions,
@@ -416,11 +416,11 @@ where we keep replacing \code{x} on the right hand side with
the following series:

\begin{Verbatim}
x = 1 + a*x
x = 1 + a*(1 + a*x) = 1 + a + a*a*x
x = 1 + a + a*a*(1 + a*x) = 1 + a + a*a + a*a*a*x
...
x = 1 + a + a*a + a*a*a + a*a*a*a...
\end{Verbatim}
We end up with an infinite sum of products (tuples), which can be
interpreted as: A list is either empty, \code{1}; or a singleton,
@@ -445,15 +445,15 @@ of them is inhabited. Logical \emph{and} and \emph{or} also form a
semiring, and it too can be mapped into type theory:

\begin{longtable}[]{@{}ll@{}}
\toprule
Logic & Types\tabularnewline
\midrule
\endhead
$\mathit{false}$ & \code{Void}\tabularnewline
$\mathit{true}$ & \code{()}\tabularnewline
$a \mathbin{||} b$ & \code{Either a b = Left a | Right b}\tabularnewline
$a \mathbin{\&\&} b$ & \code{(a, b)}\tabularnewline
\bottomrule
\end{longtable}

\noindent
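To make the logic reading of this second table concrete, here is a small sketch (illustrative only, not taken from the sources): a pair proves a conjunction, an injection proves a disjunction, and \code{Void} plays the role of falsehood.

\begin{snip}{haskell}
import Data.Void (Void, absurd)

-- From a proof of "a and b" we can extract a proof of a:
andElim :: (a, b) -> a
andElim = fst

-- A proof of a is enough to prove "a or b":
orIntro :: a -> Either a b
orIntro = Left

-- Falsehood (Void) implies anything, because there is no value to inspect:
exFalso :: Void -> a
exFalso = absurd
\end{snip}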
@@ -464,21 +464,21 @@ talk about function types.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
  Show the isomorphism between \code{Maybe a} and
  \code{Either () a}.
\item
  Here's a sum type defined in Haskell:

\begin{snip}{haskell}
data Shape = Circle Float
           | Rect Float Float
\end{snip}
  When we want to define a function like \code{area} that acts on a
  \code{Shape}, we do it by pattern matching on the two constructors:

\begin{snip}{haskell}
area :: Shape -> Float
area (Circle r) = pi * r * r
area (Rect d h) = d * h
@@ -486,24 +486,24 @@ area (Rect d h) = d * h
  Implement \code{Shape} in C++ or Java as an interface and create two
  classes: \code{Circle} and \code{Rect}. Implement \code{area} as
  a virtual function.
\item
  Continuing with the previous example: We can easily add a new function
  \code{circ} that calculates the circumference of a \code{Shape}.
  We can do it without touching the definition of \code{Shape}:

\begin{snip}{haskell}
circ :: Shape -> Float
circ (Circle r) = 2.0 * pi * r
circ (Rect d h) = 2.0 * (d + h)
\end{snip}
  Add \code{circ} to your C++ or Java implementation. What parts of
  the original code did you have to touch?
\item
  Continuing further: Add a new shape, \code{Square}, to
  \code{Shape} and make all the necessary updates. What code did you
  have to touch in Haskell vs. C++ or Java? (Even if you're not a
  Haskell programmer, the modifications should be pretty obvious.)
\item
  Show that $a + a = 2 \times a$ holds for types (up to
  isomorphism). Remember that $2$ corresponds to \code{Bool},
  according to our translation table.
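For the last challenge in this hunk, one possible solution sketch (the names are invented) exhibits both halves of the isomorphism $a + a = 2 \times a$:

\begin{snip}{haskell}
-- Sketch: Either a a is isomorphic to (Bool, a); the Bool remembers
-- which injection the value came from.
sumToProd :: Either a a -> (Bool, a)
sumToProd (Left x)  = (False, x)
sumToProd (Right x) = (True, x)

prodToSum :: (Bool, a) -> Either a a
prodToSum (False, x) = Left x
prodToSum (True, x)  = Right x
\end{snip}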
@@ -21,7 +21,7 @@ makes sense by now. I won't use parentheses when applying functors to
objects or morphisms.)

\begin{figure}[H]
\centering\includegraphics[width=0.3\textwidth]{images/functor.jpg}
\end{figure}

\noindent
@@ -36,8 +36,8 @@ and $g$:
\[F h = F g~.~F f\]

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/functorcompos.jpg}
\end{figure}

\noindent
@@ -50,8 +50,8 @@ Here, $\idarrow[a]$ is the identity at the object $a$,
and $\idarrow[F a]$ the identity at $F a$.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/functorid.jpg}
\end{figure}

\noindent
@@ -124,8 +124,8 @@ signature:
\src{snippet04}

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/functormaybe.jpg}
\end{figure}

\noindent
@@ -707,22 +707,22 @@ We'll see later that functors form categories as well.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
  Can we turn the \code{Maybe} type constructor into a functor by
  defining:

\begin{snip}{haskell}
fmap _ _ = Nothing
\end{snip}

  which ignores both of its arguments? (Hint: Check the functor laws.)
\item
  Prove functor laws for the reader functor. Hint: it's really simple.
\item
  Implement the reader functor in your second favorite language (the
  first being Haskell, of course).
\item
  Prove the functor laws for the list functor. Assume that the laws are
  true for the tail part of the list you're applying it to (in other
  words, use \emph{induction}).
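For the first challenge above, a quick check (a sketch; the clone type avoids clashing with the real \code{Maybe} instance) shows why the proposed \code{fmap} violates the identity law:

\begin{snip}{haskell}
-- Sketch: a Maybe clone equipped with the law-breaking fmap from the challenge.
data MaybeBroken a = NothingB | JustB a
  deriving (Show, Eq)

instance Functor MaybeBroken where
  fmap _ _ = NothingB

-- The identity law demands fmap id x == x, but here
-- fmap id (JustB 1) evaluates to NothingB, so the law fails:
identityLawHolds :: Bool
identityLawHolds = fmap id (JustB (1 :: Int)) == JustB 1   -- False
\end{snip}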
@@ -18,7 +18,7 @@ $\cat{E}$. Notice that this is just saying that it's a mapping from a
\newterm{Cartesian product} of categories $\cat{C}\times{}\cat{D}$ to $\cat{E}$.

\begin{figure}[H]
\centering\includegraphics[width=0.3\textwidth]{images/bifunctor.jpg}
\end{figure}

\noindent
@@ -52,8 +52,8 @@ constructor that takes two type arguments. Here's the definition of the
\src{snippet01}

\begin{figure}[H]
\centering\includegraphics[width=0.3\textwidth]{images/bimap.jpg}
\caption{bimap}
\end{figure}

The type variable \code{f} represents the bifunctor. You can see that
@@ -102,7 +102,7 @@ make sure they are related to each other in this manner).

An important example of a bifunctor is the categorical product --- a
product of two objects that is defined by a \hyperref[products-and-coproducts]{universal
construction}. If the product exists for any pair of objects, the
mapping from those objects to the product is bifunctorial. This is true
in general, and in Haskell in particular. Here's the \code{Bifunctor}
instance for a pair constructor --- the simplest product type:
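The snippet referenced here falls outside the hunk. For reference, a sketch of what such an instance looks like, written against a local stand-in for the \code{Bifunctor} class so it compiles on its own (an illustration, not necessarily the book's exact snippet):

\begin{snip}{haskell}
-- A local stand-in for the Bifunctor class, mirroring Data.Bifunctor:
class Bifunctor f where
  bimap :: (a -> c) -> (b -> d) -> f a b -> f c d

-- The pair constructor is a bifunctor: apply one function to each component.
instance Bifunctor (,) where
  bimap f g (x, y) = (f x, g y)
\end{snip}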
@@ -362,7 +362,7 @@ This implementation can also be automatically derived by the compiler.
\section{The Writer Functor}

I promised that I would come back to the \hyperref[kleisli-categories]{Kleisli
category} I described earlier. Morphisms in that category were
represented as ``embellished'' functions returning the \code{Writer}
data structure.

@@ -488,8 +488,8 @@ by the way --- the kind we've been studying thus far --- are called
\emph{covariant} functors.

\begin{figure}[H]
\centering
\includegraphics[width=40mm]{images/contravariant.jpg}
\end{figure}

\noindent
@@ -538,9 +538,9 @@ the defaults for \code{lmap} and \code{rmap}, or implementing both
\code{dimap}.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/dimap.jpg}
\caption{dimap}
\end{figure}

\noindent
@@ -562,8 +562,8 @@ category $\cat{C}^{op}\times{}\cat{C}$ to the category of sets, $\Set$.
Let's define its action on morphisms. A morphism in
$\cat{C}^{op}\times{}\cat{C}$ is a pair of morphisms from $\cat{C}$:
\begin{gather*}
f \Colon a' \to a \\
g \Colon b \to b'
\end{gather*}
The lifting of this pair must be a morphism (a function) from the set
$\cat{C}(a, b)$ to the set $\cat{C}(a', b')$. Just pick
@@ -577,11 +577,11 @@ As you can see, the hom-functor is a special case of a profunctor.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
  Show that the data type:

\begin{snip}{haskell}
data Pair a b = Pair a b
\end{snip}

@@ -589,23 +589,23 @@ data Pair a b = Pair a b
  \code{Bifunctor} and use equational reasoning to show that these
  definitions are compatible with the default implementations whenever
  they can be applied.
\item
  Show the isomorphism between the standard definition of \code{Maybe}
  and this desugaring:

\begin{snip}{haskell}
type Maybe' a = Either (Const () a) (Identity a)
\end{snip}

  Hint: Define two mappings between the two implementations. For
  additional credit, show that they are the inverse of each other using
  equational reasoning.
\item
  Let's try another data structure. I call it a \code{PreList} because
  it's a precursor to a \code{List}. It replaces recursion with a type
  parameter \code{b}.

\begin{snip}{haskell}
data PreList a b = Nil | Cons a b
\end{snip}

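For the \code{Maybe'} challenge just above, one possible pair of mappings (a sketch; the function names are invented) between the two representations:

\begin{snip}{haskell}
import Data.Functor.Const (Const (..))
import Data.Functor.Identity (Identity (..))

type Maybe' a = Either (Const () a) (Identity a)

toMaybe' :: Maybe a -> Maybe' a
toMaybe' Nothing  = Left (Const ())
toMaybe' (Just x) = Right (Identity x)

fromMaybe' :: Maybe' a -> Maybe a
fromMaybe' (Left (Const ()))    = Nothing
fromMaybe' (Right (Identity x)) = Just x
-- Composing the two mappings in either order gives the identity.
\end{snip}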
@@ -614,11 +614,11 @@ data PreList a b = Nil | Cons a b
  done when we talk about fixed points).

  Show that \code{PreList} is an instance of \code{Bifunctor}.
\item
  Show that the following data types define bifunctors in \code{a} and
  \code{b}:

\begin{snip}{haskell}
data K2 c a b = K2 c

data Fst a b = Fst a
@@ -629,10 +629,10 @@ data Snd a b = Snd b
  For additional credit, check your solutions against Conor McBride's
  paper \urlref{http://strictlypositive.org/CJ.pdf}{Clowns to the Left of
  me, Jokers to the Right}.
\item
  Define a bifunctor in a language other than Haskell. Implement
  \code{bimap} for a generic pair in that language.
\item
  Should \code{std::map} be considered a bifunctor or a profunctor in
  the two template arguments \code{Key} and \code{T}? How would you
  redesign this data type to make it so?
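One possible set of instances for the \code{K2}, \code{Fst} and \code{Snd} challenge (a sketch, assuming the \code{Bifunctor} class from base's \code{Data.Bifunctor}):

\begin{snip}{haskell}
import Data.Bifunctor (Bifunctor (..))

data K2 c a b = K2 c
data Fst a b = Fst a
data Snd a b = Snd b

-- K2 stores neither an a nor a b, so both functions are ignored:
instance Bifunctor (K2 c) where
  bimap _ _ (K2 c) = K2 c

-- Fst only stores an a, so only the first function acts:
instance Bifunctor Fst where
  bimap f _ (Fst a) = Fst (f a)

-- Snd only stores a b, so only the second function acts:
instance Bifunctor Snd where
  bimap _ g (Snd b) = Snd (g b)
\end{snip}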
@@ -12,9 +12,9 @@ in the category $\Set$ every hom-set is itself an object in the
same category ---because it is, after all, a \emph{set}.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/set-hom-set.jpg}
\caption{Hom-set in Set is just a set}
\end{figure}

\noindent
@@ -22,9 +22,9 @@ The same is not true of other categories where hom-sets are external to
a category. They are even called \emph{external} hom-sets.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/hom-set.jpg}
\caption{Hom-set in category C is an external set}
\end{figure}

\noindent
@@ -46,7 +46,7 @@ relationship to the argument type and the result type. We've already
seen the constructions of composite types --- those that involved
relationships between objects. We used universal constructions to define
a \hyperref[products-and-coproducts]{product
and coproduct types}. We can use the same trick to define a
function type. We will need a pattern that involves three objects: the
function type that we are constructing, the argument type, and the
result type.
@@ -68,10 +68,10 @@ $f x$ (the application of $f$ to $x$, which is an
element of $b$).

\begin{figure}[H]
\centering\includegraphics[width=0.35\textwidth]{images/functionset.jpg}
\caption{In Set we can pick a function $f$ from a set of functions $z$ and we can
  pick an argument $x$ from the set (type) $a$. We get an element $f x$ in the
  set (type) $b$.}
\end{figure}

\noindent
@@ -86,10 +86,10 @@ So that's the pattern: a product of two objects $z$ and
$a$ connected to another object $b$ by a morphism $g$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/functionpattern.jpg}
\caption{A pattern of objects and morphisms that is the starting point of the
  universal construction}
\end{figure}

\noindent
@@ -121,9 +121,9 @@ through the application of $g$. (Hint: Read this sentence while
looking at the picture.)

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/functionranking.jpg}
\caption{Establishing a ranking between candidates for the function object}
\end{figure}

\noindent
@@ -134,7 +134,7 @@ that has both $z'$ and $z$ crossed with $a$.
What we really need, given the mapping $h$ from $z'$
to $z$, is a mapping from $z' \times a$ to $z \times a$.
And now, after discussing the \hyperref[functoriality]{functoriality
of the product}, we know how to do it. Because the product itself is a
functor (more precisely an endo-bi-functor), it's possible to lift pairs
of morphisms. In other words, we can define not only products of objects
but also products of morphisms.
@@ -161,29 +161,29 @@ $eval$. This object is better than any other object according to
our ranking.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/universalfunctionobject.jpg}
\caption{The definition of the universal function object. This is the same
  diagram as above, but now the object $a \Rightarrow b$ is \emph{universal}.}
\end{figure}

\noindent
Formally:

\begin{longtable}[]{@{}l@{}}
\toprule
\begin{minipage}[t]{0.97\columnwidth}\raggedright\strut
A \emph{function object} from $a$ to $b$ is an object
$a \Rightarrow b$ together with the morphism
\[eval \Colon ((a \Rightarrow b) \times a) \to b\]
such that for any other object $z$ with a morphism
\[g \Colon z \times a \to b\]
there is a unique morphism
\[h \Colon z \to (a \Rightarrow b)\]
that factors $g$ through $eval$:
\[g = eval \circ (h \times \id)\]
\end{minipage}\tabularnewline
\bottomrule
\end{longtable}

\noindent
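In Haskell, this universal property is exactly what currying expresses. A minimal sketch (using only Prelude-level definitions; the names \code{eval} and \code{factorize} are chosen here for illustration):

\begin{snip}{haskell}
-- eval applies a function to an argument, packaged as a pair:
eval :: (a -> b, a) -> b
eval (f, x) = f x

-- Given any g :: (z, a) -> b, the factorizing morphism h :: z -> (a -> b)
-- is obtained by currying g:
factorize :: ((z, a) -> b) -> (z -> (a -> b))
factorize g = \z x -> g (z, x)

-- And g indeed factors through eval:
--   g (z, x) == eval (factorize g z, x)
\end{snip}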
@@ -365,12 +365,12 @@ such a category.
A Cartesian closed category must contain:

\begin{enumerate}
\tightlist
\item
  The terminal object,
\item
  A product of any pair of objects, and
\item
  An exponential for any pair of objects.
\end{enumerate}
If you consider an exponential as an iterated product (possibly
@@ -388,8 +388,8 @@ The terminal object and the product have their duals: the initial object
and the coproduct. A Cartesian closed category that also supports those
two, and in which product can be distributed over coproduct
\begin{gather*}
a \times (b + c) = a \times b + a \times c \\
(b + c) \times a = b \times a + c \times a
\end{gather*}
is called a \newterm{bicartesian closed} category. We'll see in the next
section that bicartesian closed categories, of which $\Set$ is a
@@ -596,8 +596,8 @@ revolutionizing the world in more than one way.
\section{Bibliography}

\begin{enumerate}
\tightlist
\item
  Ralph Hinze, Daniel W. H. James,
  \urlref{http://www.cs.ox.ac.uk/ralf.hinze/publications/WGP10.pdf}{Reason
  Isomorphically!}. This paper contains proofs of all those high-school
|
@ -37,7 +37,7 @@ if we could find it, we would probably revolutionize our understanding
|
|||||||
of the universe.
|
of the universe.
|
||||||
|
|
||||||
\begin{wrapfigure}{R}{0pt}
|
\begin{wrapfigure}{R}{0pt}
|
||||||
\includegraphics[width=0.5\textwidth]{images/asteroids.png}
|
\includegraphics[width=0.5\textwidth]{images/asteroids.png}
|
||||||
\end{wrapfigure}
|
\end{wrapfigure}
|
||||||
|
|
||||||
Let me elaborate. There is a similar duality in physics, which either
|
Let me elaborate. There is a similar duality in physics, which either
|
||||||
@ -66,8 +66,8 @@ which is given by force divided by mass.
|
|||||||
These are the direct encodings of the differential equations
|
These are the direct encodings of the differential equations
|
||||||
corresponding to Newton's laws of motion:
|
corresponding to Newton's laws of motion:
|
||||||
\begin{align*}
|
\begin{align*}
|
||||||
F &= m \frac{dv}{dt} \\
|
F & = m \frac{dv}{dt} \\
|
||||||
v &= \frac{dx}{dt}
|
v & = \frac{dx}{dt}
|
||||||
\end{align*}
|
\end{align*}
|
||||||
Similar methods may be applied to more complex problems, like the
|
Similar methods may be applied to more complex problems, like the
|
||||||
propagation of electromagnetic fields using Maxwell's equations, or even
|
propagation of electromagnetic fields using Maxwell's equations, or even
|
||||||
@ -93,14 +93,14 @@ then take a shortcut through water. The path of minimum time makes the
|
|||||||
ray refract at the boundary of air and water, resulting in Snell's law
|
ray refract at the boundary of air and water, resulting in Snell's law
|
||||||
of refraction:
|
of refraction:
|
||||||
\begin{equation*}
|
\begin{equation*}
|
||||||
\frac{sin(\theta_1)}{sin(\theta_2)} = \frac{v_1}{v_2}
|
\frac{sin(\theta_1)}{sin(\theta_2)} = \frac{v_1}{v_2}
|
||||||
\end{equation*}
|
\end{equation*}
|
||||||
where $v_1$ is the speed of light in the air and $v_2$ is
|
where $v_1$ is the speed of light in the air and $v_2$ is
|
||||||
the speed of light in the water.
|
the speed of light in the water.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.3\textwidth]{images/snell.jpg}
|
\includegraphics[width=0.3\textwidth]{images/snell.jpg}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -116,8 +116,8 @@ minimize kinetic energy. Then it will speed up to go quickly through the
|
|||||||
area of low potential energy.
|
area of low potential energy.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.35\textwidth]{images/mortar.jpg}
|
\includegraphics[width=0.35\textwidth]{images/mortar.jpg}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -128,8 +128,8 @@ Feynman path integral between those states is used to calculate the
|
|||||||
probability of transition.
|
probability of transition.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.35\textwidth]{images/feynman.jpg}
|
\includegraphics[width=0.35\textwidth]{images/feynman.jpg}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -180,8 +180,8 @@ best such object --- it optimizes a certain property: the property of
|
|||||||
factorizing the projections of other such objects.
|
factorizing the projections of other such objects.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.35\textwidth]{images/productranking.jpg}
|
\includegraphics[width=0.35\textwidth]{images/productranking.jpg}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -200,7 +200,7 @@ built in, rather than being defined by universal constructions; although
|
|||||||
there have been attempts at creating categorical programming languages
|
there have been attempts at creating categorical programming languages
|
||||||
(see, e.g.,
|
(see, e.g.,
|
||||||
\urlref{http://web.sfc.keio.ac.jp/~hagino/thesis.pdf}{Tatsuya
|
\urlref{http://web.sfc.keio.ac.jp/~hagino/thesis.pdf}{Tatsuya
|
||||||
Hagino's thesis}).
|
Hagino's thesis}).
|
||||||
|
|
||||||
Whether used directly or not, categorical definitions justify
|
Whether used directly or not, categorical definitions justify
|
||||||
pre-existing programming constructs, and give rise to new ones. Most
|
pre-existing programming constructs, and give rise to new ones. Most
|
||||||
|
@@ -8,8 +8,8 @@ Now that we know more about \hyperref[functors]{functors} and
try.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/productpattern.jpg}
\end{figure}

\noindent
@@ -36,8 +36,8 @@ nothing can stop us from using categories more complex than $\cat{2}$
to define our patterns.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/two.jpg}
\end{figure}

\noindent
@@ -52,8 +52,8 @@ done with $\Delta_c$. Remember, $\Delta_c$ maps all
objects into $c$ and all morphisms into $\idarrow[c]$.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/twodelta.jpg}
\end{figure}

\noindent
@@ -75,8 +75,8 @@ is trivially satisfied, because there are no morphisms (other than the
identities) in $\cat{2}$.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/productcone.jpg}
\end{figure}

\noindent
@@ -99,8 +99,8 @@ morphisms connecting $c$ to the diagram: the image of $\cat{I}$
under $D$.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/cone.jpg}
\end{figure}

\noindent
@@ -114,13 +114,13 @@ the naturality square becomes a commuting triangle. The two arms of this
triangle are the components of the natural transformation.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/conenaturality.jpg}
\end{figure}

\noindent
So that's one cone. What we are interested in is the \newterm{universal
cone} --- just like we picked a universal object for our definition of a
product.

There are many ways to go about it. For instance, we may define a
@@ -139,19 +139,19 @@ instance:
\src{snippet01}

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/productranking.jpg}
\end{figure}

This condition translates, in the general case, to the condition that
the triangles whose one side is the factorizing morphism all commute.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/conecommutativity.jpg}
\caption{The commuting triangle connecting two cones, with the factorizing
  morphism $h$ (here, the lower cone is the universal one, with
  $\Lim[D]$ as its apex)}
\end{figure}

\noindent
@@ -231,8 +231,8 @@ Notice the inversion in the order of $c$ and $c'$
characteristic of a \emph{contravariant} functor.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/homsetmapping.jpg}
\end{figure}

\noindent
@@ -265,8 +265,8 @@ It's relatively easy to show that those components indeed add up to a
natural transformation.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/natmapping.jpg}
\end{figure}

\noindent
@@ -346,8 +346,8 @@ and two projections:
\src{snippet04}

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/equalizercone.jpg}
\end{figure}

\noindent
@@ -401,8 +401,8 @@ with
\src{snippet12}

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/equilizerlimit.jpg}
\end{figure}

\noindent
@@ -428,8 +428,8 @@ and three morphisms:
\src{snippet14}

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/pullbackcone.jpg}
\end{figure}

\noindent
@@ -441,8 +441,8 @@ So we are only left with the following condition:
A pullback is a universal cone of this shape.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/pullbacklimit.jpg}
\end{figure}

\noindent
@@ -490,8 +490,8 @@ overriding of methods. And, of course, there would be no pullback if
there is a name conflict between some methods of \code{B} and \code{C}.

\begin{figure}[H]
\centering
\includegraphics[width=0.25\textwidth]{images/classes.jpg}
\end{figure}

\noindent
@@ -551,9 +551,9 @@ morphism, which now flows from the universal co-cone to any other
co-cone.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/colimit.jpg}
\caption{Cocone with a factorizing morphism $h$ connecting two apexes.}
\end{figure}

\noindent
@@ -562,8 +562,8 @@ diagram generated by $\cat{2}$, the category we've used in the
definition of the product.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/coproductranking.jpg}
\end{figure}

\noindent
@@ -582,7 +582,7 @@ $1\leftarrow2\rightarrow3$.
I said previously that functors come close to the idea of continuous
mappings of categories, in the sense that they never break existing
connections (morphisms). The actual definition of a \emph{continuous
functor} $F$ from a category $\cat{C}$ to $\cat{C'}$ includes the
requirement that the functor preserve limits. Every diagram $D$
in $\cat{C}$ can be mapped to a diagram $F \circ D$ in $\cat{C'}$ by
simply composing two functors. The continuity condition for $F$
@@ -591,8 +591,8 @@ the diagram $F \circ D$ also has a limit, and it is equal to
$F (\Lim[D])$.

\begin{figure}[H]
\centering
\includegraphics[width=0.6\textwidth]{images/continuity.jpg}
\end{figure}

\noindent
@@ -650,23 +650,23 @@ poset).
\section{Challenges}

\begin{enumerate}
\tightlist
\item
  How would you describe a pushout in the category of C++ classes?
\item
  Show that the limit of the identity functor
  $\mathbf{Id} \Colon \cat{C} \to \cat{C}$ is the initial object.
\item
  Subsets of a given set form a category. A morphism in that category is
  defined to be an arrow connecting two sets if the first is the subset
  of the second. What is a pullback of two sets in such a category?
  What's a pushout? What are the initial and terminal objects?
\item
  Can you guess what a coequalizer is?
\item
  Show that, in a category with a terminal object, a pullback towards
  the terminal object is a product.
\item
  Similarly, show that a pushout from an initial object (if one exists)
  is the coproduct.
\end{enumerate}
@@ -43,8 +43,8 @@ $(b, b)$, and end up with the set
$\{e, a, b, (a, a), (a, b), (b, a), (b, b)\}$.

\begin{figure}[H]
\centering
\includegraphics[width=0.8\textwidth]{images/bunnies.jpg}
\end{figure}

\noindent
@@ -63,7 +63,7 @@ This kind of construction, in which you keep generating all possible
combinations of elements, and perform the minimum number of
identifications --- just enough to uphold the laws --- is called a free
construction. What we have just done is to construct a \newterm{free
monoid} from the set of generators $\{a, b\}$.

\section{Free Monoid in Haskell}

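The section this hunk cuts into identifies the free monoid on a set with the type of lists over that set. A sketch of the idea (wrapped in a newtype so it does not clash with the list instances already in base):

\begin{snip}{haskell}
-- Lists form a free monoid: concatenation is the only combination rule,
-- the empty list is the unit, and no extra identifications are imposed.
newtype FreeMonoid a = FreeMonoid [a]
  deriving (Show, Eq)

instance Semigroup (FreeMonoid a) where
  FreeMonoid xs <> FreeMonoid ys = FreeMonoid (xs ++ ys)

instance Monoid (FreeMonoid a) where
  mempty = FreeMonoid []

-- Generators are embedded as singleton lists:
embed :: a -> FreeMonoid a
embed x = FreeMonoid [x]
\end{snip}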
@@ -186,8 +186,8 @@ collapse them). It will all be sorted out by the universal construction,
which will pick the best representative of this pattern.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/monoid-pattern.jpg}
\end{figure}

\noindent
@@ -213,8 +213,8 @@ be mapped to a product of the corresponding two generators in the second
monoid, and so on.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/monoid-ranking.jpg}
\end{figure}

\noindent
@@ -222,11 +222,11 @@ This ranking may be used to find the best candidate --- the free monoid.
Here's the definition:

\begin{quote}
We'll say that $m$ (together with the function $p$) is the
\textbf{free monoid} with the generators $x$ if and only if there
is a \emph{unique} morphism $h$ from $m$ to any other
monoid $n$ (together with the function $q$) that satisfies
the above factorization property.
\end{quote}
Incidentally, this answers our second question. The function
$U h$ is the one that has the power to collapse multiple
@@ -242,13 +242,13 @@ We'll come back to free monoids when we talk about adjunctions.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
  You might think (as I did, originally) that the requirement that a
  homomorphism of monoids preserve the unit is redundant. After all, we
  know that for all $a$

\begin{snip}{text}
h a * h e = h (a * e) = h a
\end{snip}
  So $h e$ acts like a right unit (and, by analogy, as a left
@@ -257,7 +257,7 @@ h a * h e = h (a * e) = h a
  unit outside of the image of $h$. Show that an isomorphism
  between monoids that preserves multiplication must automatically
  preserve unit.
\item
  Consider a monoid homomorphism from lists of integers with
  concatenation to integers with multiplication. What is the image of
  the empty list \code{{[}{]}}? Assume that all singleton lists are
@@ -265,7 +265,7 @@ h a * h e = h (a * e) = h a
  mapped to 3, etc. What's the image of \code{{[}1, 2, 3, 4{]}}?
  How many different lists map to the integer 12? Is there any other
  homomorphism between the two monoids?
\item
  What is the free monoid generated by a one-element set? Can you see
  what it's isomorphic to?
\end{enumerate}
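For the second challenge, the homomorphism in question can be written down directly. A sketch (the name \code{h} is arbitrary):

\begin{snip}{haskell}
-- A monoid homomorphism from ([Int], ++, []) to (Int, *, 1):
h :: [Int] -> Int
h = product

-- It sends the unit to the unit:        h []          == 1
-- It preserves the binary operation:    h (xs ++ ys)  == h xs * h ys
-- With singletons mapped to their values, h [1, 2, 3, 4] == 24.
\end{snip}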
|
@ -58,8 +58,8 @@ fixed, $\cat{C}(a, x)$ will also vary in $\Set$. Thus we have a
|
|||||||
mapping from $x$ to $\Set$.
|
mapping from $x$ to $\Set$.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.45\textwidth]{images/hom-set.jpg}
|
\includegraphics[width=0.45\textwidth]{images/hom-set.jpg}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -85,8 +85,8 @@ is a morphism going from $a$ to $y$. It is therefore a
|
|||||||
member of $\cat{C}(a, y)$.
|
member of $\cat{C}(a, y)$.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.45\textwidth]{images/hom-functor.jpg}
|
\includegraphics[width=0.45\textwidth]{images/hom-functor.jpg}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -297,24 +297,24 @@ explore alternative implementations that have practical value.
|
|||||||
\section{Challenges}
|
\section{Challenges}
|
||||||
|
|
||||||
\begin{enumerate}
|
\begin{enumerate}
|
||||||
\tightlist
|
\tightlist
|
||||||
\item
|
\item
|
||||||
Show that the hom-functors map identity morphisms in \emph{C} to
|
Show that the hom-functors map identity morphisms in \emph{C} to
|
||||||
corresponding identity functions in $\Set$.
|
corresponding identity functions in $\Set$.
|
||||||
\item
|
\item
|
||||||
Show that \code{Maybe} is not representable.
|
Show that \code{Maybe} is not representable.
|
||||||
\item
|
\item
|
||||||
Is the \code{Reader} functor representable?
|
Is the \code{Reader} functor representable?
|
||||||
\item
|
\item
|
||||||
Using \code{Stream} representation, memoize a function that squares
|
Using \code{Stream} representation, memoize a function that squares
|
||||||
its argument.
|
its argument.
|
||||||
\item
|
\item
|
||||||
Show that \code{tabulate} and \code{index} for \code{Stream} are
|
Show that \code{tabulate} and \code{index} for \code{Stream} are
|
||||||
indeed the inverse of each other. (Hint: use induction.)
|
indeed the inverse of each other. (Hint: use induction.)
|
||||||
\item
|
\item
|
||||||
The functor:
|
The functor:
|
||||||
|
|
||||||
\begin{snip}{haskell}
|
\begin{snip}{haskell}
|
||||||
Pair a = Pair a a
|
Pair a = Pair a a
|
||||||
\end{snip}
|
\end{snip}
|
||||||
is representable. Can you guess the type that represents it? Implement
|
is representable. Can you guess the type that represents it? Implement
|
||||||
@ -324,8 +324,8 @@ Pair a = Pair a a
|
|||||||
\section{Bibliography}
|
\section{Bibliography}
|
||||||
|
|
||||||
\begin{enumerate}
|
\begin{enumerate}
|
||||||
\tightlist
|
\tightlist
|
||||||
\item
|
\item
|
||||||
The Catsters video about
|
The Catsters video about
|
||||||
\urlref{https://www.youtube.com/watch?v=4QgjKUzyrhM}{representable
|
\urlref{https://www.youtube.com/watch?v=4QgjKUzyrhM}{representable
|
||||||
functors}.
|
functors}.
|
||||||
|
@ -50,13 +50,13 @@ $\alpha$. Because we are operating in $\Set$, the components of
|
|||||||
the natural transformation, like $\alpha_x$ or $\alpha_y$, are just
|
the natural transformation, like $\alpha_x$ or $\alpha_y$, are just
|
||||||
regular functions between sets:
|
regular functions between sets:
|
||||||
\begin{gather*}
|
\begin{gather*}
|
||||||
\alpha_x \Colon \cat{C}(a, x) \to F x \\
|
\alpha_x \Colon \cat{C}(a, x) \to F x \\
|
||||||
\alpha_y \Colon \cat{C}(a, y) \to F y
|
\alpha_y \Colon \cat{C}(a, y) \to F y
|
||||||
\end{gather*}
|
\end{gather*}
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.4\textwidth]{images/yoneda1.png}
|
\includegraphics[width=0.4\textwidth]{images/yoneda1.png}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -79,8 +79,8 @@ Just how strong this condition is can be seen by specializing it to the
|
|||||||
case of $x = a$.
|
case of $x = a$.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.4\textwidth]{images/yoneda2.png}
|
\includegraphics[width=0.4\textwidth]{images/yoneda2.png}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -102,8 +102,8 @@ get a point in $F a$.
|
|||||||
We have just proven the Yoneda lemma:
|
We have just proven the Yoneda lemma:
|
||||||
|
|
||||||
\begin{quote}
|
\begin{quote}
|
||||||
There is a one-to-one correspondence between natural transformations
|
There is a one-to-one correspondence between natural transformations
|
||||||
from $\cat{C}(a, -)$ to $F$ and elements of $F a$.
|
from $\cat{C}(a, -)$ to $F$ and elements of $F a$.
|
||||||
\end{quote}
|
\end{quote}
|
||||||
in other words,
|
in other words,
|
||||||
\[\cat{Nat}(\cat{C}(a, -), F) \cong F a\]
|
\[\cat{Nat}(\cat{C}(a, -), F) \cong F a\]
|
||||||
@@ -133,8 +133,8 @@ $p$ to some point $q$ in $F a$. I'll show you that
any choice of $q$ leads to a unique natural transformation.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/yoneda3.png}
\end{figure}

\noindent
@@ -149,8 +149,8 @@ hom-functor, the morphism $g$ is mapped to a function
$\cat{C}(a, g)$; and under $F$ it's mapped to $F g$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/yoneda4.png}
\end{figure}

\noindent
@@ -186,8 +186,8 @@ Since $p'$ was arbitrary, the whole function $\alpha_x$ is
thus determined.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/yoneda5.png}
\end{figure}

\noindent
@@ -315,23 +315,23 @@ called the Yoneda lemma.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Show that the two functions \code{phi} and \code{psi} that form
the Yoneda isomorphism in Haskell are inverses of each other.

\begin{snip}{haskell}
phi :: (forall x . (a -> x) -> F x) -> F a
phi alpha = alpha id

psi :: F a -> (forall x . (a -> x) -> F x)
psi fa h = fmap h fa
\end{snip}
\item
A discrete category is one that has objects but no morphisms other
than identity morphisms. How does the Yoneda lemma work for functors
from such a category?
\item
A list of units \code{{[}(){]}} contains no other information but
its length. So, as a data type, it can be considered an encoding of
integers. An empty list encodes zero, a singleton \code{{[}(){]}} (a
@@ -343,7 +343,7 @@ psi fa h = fmap h fa
\section{Bibliography}

\begin{enumerate}
\tightlist
\item
\urlref{https://www.youtube.com/watch?v=TLMxHB19khE}{Catsters} video.
\end{enumerate}
@@ -15,11 +15,11 @@ It's a mapping of objects from category $\cat{C}$ to functors, which are
\emph{objects} in the functor category (see the section about functor
categories in
\hyperref[natural-transformations]{Natural
Transformations}). Let's use the notation $[\cat{C}, \Set]$ for the
functor category from $\cat{C}$ to $\Set$. You may also recall that
hom-functors are the prototypical
\hyperref[representable-functors]{representable
functors}.

Every time we have a mapping of objects between two categories, it's
natural to ask if such a mapping is also a functor. In other words
@@ -41,8 +41,8 @@ and replace the generic $F$ with the hom-functor
$\cat{C}(b, -)$. We get:
\[[\cat{C}, \Set](\cat{C}(a, -), \cat{C}(b, -)) \cong \cat{C}(b, a)\]
\begin{figure}[H]
\centering
\includegraphics[width=0.6\textwidth]{images/yoneda-embedding.jpg}
\end{figure}

\noindent
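
In Haskell notation this isomorphism says that a polymorphic function between
the two reader-like functors is the same thing as a plain function going the
other way. A minimal sketch (the names \code{fromY} and \code{btoa} anticipate
the challenges at the end of this chapter; \code{toY} is ad hoc):

\begin{snip}{haskell}
{-# LANGUAGE RankNTypes #-}

-- A plain morphism b -> a gives rise to a natural transformation
-- between the hom-functors (a -> x) and (b -> x), by precomposition.
toY :: (b -> a) -> (forall x. (a -> x) -> (b -> x))
toY btoa ax = ax . btoa

-- Conversely, instantiating the natural transformation at x = a and
-- feeding it the identity recovers the morphism.
fromY :: (forall x. (a -> x) -> (b -> x)) -> (b -> a)
fromY alpha = alpha id
\end{snip}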
@@ -53,8 +53,8 @@ $\cat{C}(b, a)$ --- that goes in the ``wrong'' direction. But that's
okay; it only means that the functor we are looking at is contravariant.

\begin{figure}[H]
\centering
\includegraphics[width=0.65\textwidth]{images/yoneda-embedding-2.jpg}
\end{figure}

\noindent
@@ -105,7 +105,7 @@ hom-set, $\cat{C}(-, a)$. That would give us a contravariant
hom-functor. Contravariant functors from $\cat{C}$ to $\Set$ are
our familiar presheaves (see, for instance,
\hyperref[limits-and-colimits]{Limits
and Colimits}). The co-Yoneda embedding defines the embedding of a
category $\cat{C}$ in the category of presheaves. Its action on morphisms
is given by:
\[[\cat{C}, \Set](\cat{C}(-, a), \cat{C}(-, b)) \cong \cat{C}(a, b)\]
@@ -275,21 +275,21 @@ to go wrong.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Express the co-Yoneda embedding in Haskell.
\item
Show that the bijection we established between \code{fromY} and
\code{btoa} is an isomorphism (the two mappings are the inverse of
each other).
\item
Work out the Yoneda embedding for a monoid. What functor corresponds
to the monoid's single object? What natural transformations correspond
to monoid morphisms?
\item
What is the application of the \emph{covariant} Yoneda embedding to
preorders? (Question suggested by Gershom Bazerman.)
\item
Yoneda embedding can be used to embed an arbitrary functor category
$[\cat{C}, \cat{D}]$ in the functor category
$[[\cat{C}, \cat{D}], \Set]$. Figure out how it works on morphisms
@@ -38,8 +38,8 @@ $c$, together with a pair of morphisms $p$ and $q$,
that has the universal property of being their product.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/productranking.jpg}
\end{figure}

\noindent
@@ -59,8 +59,8 @@ functors $F$ and $G$. The other sides are the components
of the natural transformation (which are also morphisms).

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/3_naturality.jpg}
\end{figure}

\noindent
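
In Haskell, the components and the commuting square are easiest to see on a
small example; the standard \code{safeHead} transformation (my choice of
illustration, not tied to the figure above) will do:

\begin{snip}{haskell}
-- A natural transformation between the list functor and Maybe.
safeHead :: [a] -> Maybe a
safeHead []      = Nothing
safeHead (x : _) = Just x

-- Naturality: for any f, the two paths around the square agree:
--   fmap f . safeHead = safeHead . fmap f
-- e.g. fmap (+1) (safeHead [1, 2]) == safeHead (fmap (+1) [1, 2]) == Just 2
\end{snip}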
@@ -77,8 +77,8 @@ transformation maps one such sheet corresponding to F, to another,
corresponding to G.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/sheets.png}
\end{figure}

\noindent
@@ -204,8 +204,8 @@ empty. We can visualize a general category as a ``thick'' preorder.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Consider some degenerate cases of a naturality condition and draw the
appropriate diagrams. For instance, what happens if either functor
$F$ or $G$ maps both objects $a$ and $b$
@@ -77,8 +77,8 @@ family of morphisms:
for which the following diagram commutes, for any $f \Colon a \to b$:

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/end.jpg}
\end{figure}

\noindent
@@ -89,8 +89,8 @@ from two naturality squares and one functoriality condition (profunctor
$q$ preserving composition):

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/end-1.jpg}
\end{figure}

\noindent
@@ -126,8 +126,8 @@ $p\ a\ b$. We therefore insist that the following diagram
commute:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/end-2.jpg}
\end{figure}

\noindent
@@ -144,8 +144,8 @@ $h \Colon a \to e$ that makes all triangles commute:
\[\pi_a \circ h = \alpha_a\]

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/end-21.jpg}
\end{figure}

\noindent
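
For $\Set$-valued profunctors there is a well-known folklore encoding of the
end in Haskell as a polymorphic value; the sketch below uses illustrative
names (and \code{RankNTypes}) and also shows the special case that recovers
natural transformations:

\begin{snip}{haskell}
{-# LANGUAGE RankNTypes #-}

-- The end of a two-argument type constructor p, as a polymorphic value:
-- a single value that projects to p x x for every choice of x.
type End p = forall x. p x x

-- The profunctor p a b = f a -> g b, built from two functors f and g.
newtype NatP f g a b = NatP (f a -> g b)

-- An element of its end wraps a polymorphic function f x -> g x,
-- i.e. the Haskell rendering of a natural transformation:
listToMaybe' :: End (NatP [] Maybe)
listToMaybe' = NatP go
  where
    go []      = Nothing
    go (x : _) = Just x
\end{snip}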
@@ -245,8 +245,8 @@ is a profunctor, so it makes sense to study its end. This is the wedge
condition:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/end1.jpg}
\end{figure}

\noindent
@@ -254,8 +254,8 @@ Let's just pick one element from the set $\int_c \cat{C}(F\ c, G\ c)$.
The two projections will map this element to two components of a
particular transformation, let's call them:
\begin{align*}
-\tau_a &\Colon F\ a \to G\ a \\
-\tau_b &\Colon F\ b \to G\ b
+\tau_a & \Colon F\ a \to G\ a \\
+\tau_b & \Colon F\ b \to G\ b
\end{align*}
In the left branch, we lift a pair of morphisms
$\langle \idarrow[a], G\ f \rangle$ using the hom-functor. You
@@ -273,9 +273,9 @@ from a dual to a wedge called a cowedge (pronounced co-wedge, not
cow-edge).

\begin{figure}[H]
\centering
\includegraphics[width=0.25\textwidth]{images/end-31.jpg}
\caption{An edgy cow?}
\end{figure}

\noindent
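
Dually, the coend has a folklore Haskell encoding as an existential type;
again a sketch with illustrative names (using \code{GADTs} syntax), not a
definition taken from the text:

\begin{snip}{haskell}
{-# LANGUAGE GADTs #-}

-- The coend of p collapses all the diagonal values p x x into one type,
-- hiding the choice of x; existential quantification does exactly that.
data Coend p where
  Coend :: p x x -> Coend p

-- For a profunctor of the shape p a b = f a -> g b, a coend element packs
-- up *some* component f x -> g x without revealing which x it was built at.
\end{snip}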
@@ -336,9 +336,9 @@ known as taking a quotient. To define a quotient we need an
\newterm{equivalence relation} $\sim$, a relation that
is reflexive, symmetric, and transitive:
\begin{align*}
-&a \sim a \\
-&\text{if}\ a \sim b\ \text{then}\ b \sim a \\
-&\text{if}\ a \sim b\ \text{and}\ b \sim c\ \text{then}\ a \sim c
+& a \sim a \\
+& \text{if}\ a \sim b\ \text{then}\ b \sim a \\
+& \text{if}\ a \sim b\ \text{and}\ b \sim c\ \text{then}\ a \sim c
\end{align*}
Such a relation splits the set into equivalence classes. Each class
consists of elements that are related to each other. We form a quotient
@@ -16,8 +16,8 @@ category $\cat{I}$ to $\cat{C}$. This is the functor that selects the base
of the cone --- the diagram functor.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan2.jpg}
\end{figure}

\noindent
@@ -29,8 +29,8 @@ morphism. Any functor $F$ from $\cat{1}$ to $\cat{C}$ picks a
potential apex for our cone.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan15.jpg}
\end{figure}

\noindent
@@ -40,8 +40,8 @@ our original $\Delta_c$. The following diagram shows this
transformation.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan3-e1492120491591.jpg}
\end{figure}

\noindent
@@ -59,8 +59,8 @@ transformation $\varepsilon'$ from $F' \circ K$ to
$D$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan31-e1492120512209.jpg}
\end{figure}

\noindent
@@ -74,8 +74,8 @@ on $K$). This transformation is then vertically composed with
$\varepsilon$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan5.jpg}
\end{figure}

\noindent
@@ -108,8 +108,8 @@ that factorizes $\varepsilon'$:
This is quite a mouthful, but it can be visualized in this nice diagram:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan7.jpg}
\end{figure}

\noindent
@@ -133,8 +133,8 @@ just half of it, namely a one-way natural transformation $\varepsilon$ from
$F \circ K$ to $D$. (The left Kan extension picks the other direction.)

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan6.jpg}
\end{figure}

\noindent
@@ -158,8 +158,8 @@ transformation we called $\varepsilon'$ corresponds a unique natural
transformation we called $\sigma$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan92.jpg}
\end{figure}

\noindent
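
For programmers it may help to keep the usual Haskell encodings of the two
Kan extensions at hand; the sketch below follows the same shape as the
definitions in the \code{kan-extensions} package, but is quoted here only as
a reference point, with illustrative parameter names:

\begin{snip}{haskell}
{-# LANGUAGE RankNTypes, GADTs #-}

-- Right Kan extension of d along k: to produce a (Ran k d) a you must be
-- ready to answer every "query" a -> k i with a d i.
newtype Ran k d a = Ran (forall i. (a -> k i) -> d i)

-- Left Kan extension of d along k: a (Lan k d) a is some hidden d i
-- together with a way to turn k i into a.
data Lan k d a where
  Lan :: (k i -> a) -> d i -> Lan k d a

-- Both are functorial in a:
instance Functor (Ran k d) where
  fmap f (Ran r) = Ran (\q -> r (q . f))

instance Functor (Lan k d) where
  fmap f (Lan g di) = Lan (f . g) di
\end{snip}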
@@ -205,8 +205,8 @@ cocone by using the functor $D \Colon \cat{I} \to \cat{C}$ to form its
base, and the functor $F \Colon \cat{1} \to \cat{C}$ to select its apex.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan81.jpg}
\end{figure}

\noindent
@@ -214,8 +214,8 @@ The sides of the cocone, the injections, are components of a natural
transformation $\eta$ from $D$ to $F \circ K$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan10a.jpg}
\end{figure}

\noindent
@@ -224,16 +224,16 @@ $F'$ and a natural transformation
\[\eta' \Colon D \to F' \circ K\]

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan10b.jpg}
\end{figure}

\noindent
there is a unique natural transformation $\sigma$ from $F$ to $F'$

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan14.jpg}
\end{figure}

\noindent
@@ -242,8 +242,8 @@ such that:
This is illustrated in the following diagram:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan112.jpg}
\end{figure}

\noindent
@@ -252,8 +252,8 @@ definition naturally generalized to the definition of the left Kan
extension, denoted by $\Lan_{K}D$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan12.jpg}
\end{figure}

\noindent
@@ -300,8 +300,8 @@ hom-functor:
\[\cat{A}(a, K\ -)\]

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/kan13.jpg}
\end{figure}

\noindent
@@ -423,15 +423,15 @@ Notice that, as described earlier in the general case, we performed the
following steps:

\begin{enumerate}
\tightlist
\item
Retrieved the container of \code{x} (here, it's
just a trivial identity container), and the function \code{f}.
\item
Repackaged the container using the natural transformation between the
identity functor and the pair functor.
\item
Called the function \code{f}.
\end{enumerate}

\section{Free Functor}
@@ -469,7 +469,7 @@ recording both the function and its argument. It accumulates the lifted
functions by recording their composition. Functor rules are
automatically satisfied. This construction was used in a paper
\urlref{http://okmij.org/ftp/Haskell/extensible/more.pdf}{Freer Monads,
More Extensible Effects}.

Alternatively, we can use the right Kan extension for the same purpose:

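A sketch of what such a Ran-based free functor could look like, reusing the
encoding of \code{Ran} above with the identity functor in the role of $K$
(the name \code{FreeR} is mine, and this is one possible reading of the
construction rather than a transcription of it):

\begin{snip}{haskell}
{-# LANGUAGE RankNTypes #-}

-- Specializing Ran to the identity functor gives a functor for free:
-- a value stores a "consumer transformer" that never inspects a directly.
newtype FreeR f a = FreeR (forall i. (a -> i) -> f i)

instance Functor (FreeR f) where
  fmap g (FreeR r) = FreeR (\h -> r (h . g))
\end{snip}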
@@ -84,37 +84,37 @@ that serves as the unit of the tensor product; again, up to natural
isomorphism. The two isomorphisms are called, respectively, the left and
the right unitor, and their components are:
\begin{align*}
-\lambda_a &\Colon i \otimes a \to a \\
-\rho_a &\Colon a \otimes i \to a
+\lambda_a & \Colon i \otimes a \to a \\
+\rho_a & \Colon a \otimes i \to a
\end{align*}
The associator and the unitors must satisfy coherence conditions:

\begin{figure}[H]
\centering
\begin{tikzcd}[row sep=large]
((a \otimes b) \otimes c) \otimes d
\arrow[d, "\alpha_{(a \otimes b)cd}"]
\arrow[rr, "\alpha_{abc} \otimes \id_d"]
& & (a \otimes (b \otimes c)) \otimes d
\arrow[d, "\alpha_{a(b \otimes c)d}"] \\
(a \otimes b) \otimes (c \otimes d)
\arrow[rd, "\alpha_{ab(c \otimes d)}"]
& & a \otimes ((b \otimes c) \otimes d)
\arrow[ld, "\id_a \otimes \alpha_{bcd}"] \\
& a \otimes (b \otimes (c \otimes d))
\end{tikzcd}
\end{figure}

\begin{figure}[H]
\centering
\begin{tikzcd}[row sep=large]
(a \otimes i) \otimes b
\arrow[dr, "\rho_{a} \otimes \id_b"']
\arrow[rr, "\alpha_{aib}"]
& & a \otimes (i \otimes b)
\arrow[dl, "\id_a \otimes \lambda_b"] \\
& a \otimes b
\end{tikzcd}
\end{figure}

\noindent
@@ -135,7 +135,7 @@ that defines the internal hom in a monoidal category:
\[\cat{V}(a \otimes b, c) \sim \cat{V}(a, [b, c])\]
Following
\urlref{http://www.tac.mta.ca/tac/reprints/articles/10/tr10.pdf}{G. M.
Kelly}, I'm using the notation ${[}b, c{]}$ for the internal
hom. The counit of this adjunction is the natural transformation whose
components are called evaluation morphisms:
\[\varepsilon_{a b} \Colon ([a, b] \otimes a) \to b\]
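
Read in Haskell, with the tensor product taken to be the ordinary pair, this
adjunction is just currying and the counit is function application; a small
sketch (the primed names avoid clashing with the Prelude's \code{curry} and
\code{uncurry}):

\begin{snip}{haskell}
-- The defining isomorphism V(a (x) b, c) ~ V(a, [b, c]) becomes currying:
curry' :: ((a, b) -> c) -> (a -> (b -> c))
curry' f a b = f (a, b)

uncurry' :: (a -> (b -> c)) -> ((a, b) -> c)
uncurry' g (a, b) = g a b

-- The counit, the evaluation morphism ([b, c] (x) b) -> c:
eval :: (b -> c, b) -> c
eval (f, x) = f x
\end{snip}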
@@ -164,8 +164,8 @@ of morphisms is replaced by a family of morphisms in $\cat{V}$:
\[\circ \Colon \cat{C}(b, c) \otimes \cat{C}(a, b) \to \cat{C}(a, c)\]

\begin{figure}[H]
\centering
\includegraphics[width=0.45\textwidth]{images/composition.jpg}
\end{figure}

\noindent
@@ -175,8 +175,8 @@ $\cat{V}$:
where $i$ is the tensor unit in $\cat{V}$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/id.jpg}
\end{figure}

\noindent
@@ -184,19 +184,19 @@ Associativity of composition is defined in terms of the associator in
$\cat{V}$:

\begin{figure}[H]
\centering
\begin{tikzcd}[column sep=large]
(\cat{C}(c,d) \otimes \cat{C}(b,c)) \otimes \cat{C}(a,b)
\arrow[r, "\circ\otimes\id"]
\arrow[dd, "\alpha"]
& \cat{C}(b,d) \otimes \cat{C}(a,b)
\arrow[dr, "\circ"] \\
& & \cat{C}(a,d) \\
\cat{C}(c,d) \otimes (\cat{C}(b,c) \otimes \cat{C}(a,b))
\arrow[r, "\id\otimes\circ"]
& \cat{C}(c,d) \otimes \cat{C}(a,c)
\arrow[ur, "\circ"]
\end{tikzcd}
\end{figure}

\noindent
@@ -271,7 +271,7 @@ enforced, if we implement a preorder as an enriched category.

An interesting example is due to
\urlref{http://www.tac.mta.ca/tac/reprints/articles/1/tr1.pdf}{William
Lawvere}. He noticed that metric spaces can be defined using enriched
categories. A metric space defines a distance between any two objects.
This distance is a non-negative real number. It's convenient to include
infinity as a possible value. If the distance is infinite, there is no
@@ -344,8 +344,8 @@ meant preserving composition and identity. In the enriched setting, the
preservation of composition means that the following diagram commutes:

\begin{figure}[H]
\centering
\begin{tikzcd}[column sep=large, row sep=large]
\cat{C}(b,c) \otimes \cat{C}(a,b)
\arrow[r, "\circ"]
\arrow[d, "F_{bc} \otimes F_{ab}"]
@@ -354,7 +354,7 @@ preservation of composition means that the following diagram commutes:
\cat{D}(F\ b, F\ c) \otimes \cat{D}(F\ a, F\ b)
\arrow[r, "\circ"]
& \cat{D}(F\ a, F\ c)
\end{tikzcd}
\end{figure}

\noindent
@@ -362,13 +362,13 @@ The preservation of identity is replaced by the preservation of the
morphisms in $\cat{V}$ that ``select'' the identity:

\begin{figure}[H]
\centering
\begin{tikzcd}[row sep=large]
& i \arrow[dl, "j_a"'] \arrow[dr, "j_{F a}"] & \\
\cat{C}(a,a)
\arrow[rr, "F_{aa}"]
& & \cat{D}(F\ a, F\ a)
\end{tikzcd}
\end{figure}

\section{Self Enrichment}
@@ -52,8 +52,8 @@ to be isomorphic to $a$. We can use this fact to define a subset
as a family of injective functions that are related by isomorphisms of
their domains. More precisely, we say that two injective functions:
\begin{align*}
-f &\Colon a \to b \\
-f' &\Colon a' \to b
+f & \Colon a \to b \\
+f' & \Colon a' \to b
\end{align*}
are equivalent if there is an isomorphism:
\[h \Colon a \to a'\]
@@ -62,8 +62,8 @@ such that:
Such a family of equivalent injections defines a subset of $b$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/subsetinjection.jpg}
\end{figure}

\noindent
@@ -72,16 +72,16 @@ injective functions with monomorphism. Just to remind you, a
monomorphism $m$ from $a$ to $b$ is defined by its
universal property. For any object $c$ and any pair of morphisms:
\begin{align*}
-g &\Colon c \to a \\
-g' &\Colon c \to a
+g & \Colon c \to a \\
+g' & \Colon c \to a
\end{align*}
such that:
\[m\ .\ g = m\ .\ g'\]
it must be that $g = g'$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/monomorphism.jpg}
\end{figure}

\noindent
@@ -93,8 +93,8 @@ $g'$ that differ only at those two elements. The
postcomposition with $m$ would then mask this difference.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/notmono.jpg}
\end{figure}

\noindent
@@ -112,8 +112,8 @@ $true$:
\[true \Colon 1 \to \Omega\]

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/true.jpg}
\end{figure}

\noindent
@@ -136,8 +136,8 @@ and the injective function that embeds it in $b$. Here's the
pullback diagram:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/pullback.jpg}
\end{figure}

\noindent
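
In $\Set$ this machinery reduces to familiar programming facts; a deliberately
crude sketch (illustrative only, with subsets modelled as filtered lists):

\begin{snip}{haskell}
-- In Set, Omega is the two-element set and "true" picks out True.
type Omega = Bool

true :: () -> Omega
true () = True

-- A subset of b is classified by its characteristic function chi; the
-- subset is recovered as the pullback of true along chi, which here is
-- crudely modelled as filtering an enumeration of b.
classify :: (b -> Omega) -> [b] -> [b]
classify chi = filter chi
\end{snip}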
@@ -197,13 +197,13 @@ representation is the object $\Omega$.
A topos is a category that:

\begin{enumerate}
\tightlist
\item
Is Cartesian closed: It has all products, the terminal object, and
exponentials (defined as right adjoints to products),
\item
Has limits for all finite diagrams,
\item
Has a subobject classifier $\Omega$.
\end{enumerate}

@@ -251,8 +251,8 @@ logics.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Show that the function $f$ that is the pullback of
$true$ along the characteristic function must be injective.
\end{enumerate}
@@ -52,23 +52,23 @@ The derivation of Lawvere theories goes through many steps, so here's
the roadmap:

\begin{enumerate}
\tightlist
\item
Category of finite sets $\cat{FinSet}$.
\item
Its skeleton $\cat{F}$.
\item
Its opposite $\Fop$.
\item
Lawvere theory $\cat{L}$: an object in the category $\cat{Law}$.
\item
Model $M$ of a Lawvere category: an object in the category\\
$\cat{Mod}(\cat{Law}, \Set)$.
\end{enumerate}

\begin{figure}[H]
\centering
\includegraphics[width=0.8\textwidth]{images/lawvere1.png}
\end{figure}

\section{Lawvere Theories}
@@ -112,7 +112,7 @@ corresponding to all $n$-element sets in $\cat{FinSet}$ that have been
identified through isomorphisms.

Using the category $\cat{F}$ we can formally define a \newterm{Lawvere
theory} as a category $\cat{L}$ equipped with a special functor:
\[I_{\cat{L}} \Colon \Fop \to \cat{L}\]
This functor must be a bijection on objects and it must preserve finite
products (products in $\Fop$ are the same as
@@ -162,12 +162,12 @@ functions (or, as we've seen earlier, that the hom-functor is
continuous).

\begin{figure}[H]
\centering
\includegraphics[width=0.8\textwidth]{images/lawvere1.png}
\caption{Lawvere theory $\cat{L}$ is based on $\Fop$, from which
it inherits the ``boring'' morphisms that define the products. It adds
the ``interesting'' morphisms that describe the $n$-ary operations (dotted
arrows).}
\end{figure}

Lawvere theories form a category $\cat{Law}$, in which morphisms are
@@ -176,8 +176,8 @@ $I$. Given two such theories, $(\cat{L}, I_{\cat{L}})$ and
$(\cat{L'}, I'_{\cat{L'}})$, a morphism between them is a
functor $F \Colon \cat{L} \to \cat{L'}$ such that:
\begin{gather*}
F\ (m \times n) = F\ m \times F\ n \\
F \circ I_{\cat{L}} = I'_{\cat{L'}}
\end{gather*}
Morphisms between Lawvere theories encapsulate the idea of the
interpretation of one theory inside another. For instance, group
@@ -208,11 +208,11 @@ products, we require that such a functor preserve finite products. A
model of $\cat{L}$, also called the algebra over the Lawvere theory
$\cat{L}$, is therefore defined by a functor:
\begin{gather*}
M \Colon \cat{L} \to \Set \\
M\ (a \times b) \cong M\ a \times M\ b
\end{gather*}
Notice that we require the preservation of products only \emph{up to
isomorphism}. This is very important, because strict preservation of
products would eliminate most interesting theories.

The preservation of products by models means that the image of
@@ -269,7 +269,7 @@ structure of monoids. It is a single theory that distills the structure
of all possible monoids, in the sense that the models of this theory
span the whole category $\cat{Mon}$ of monoids. We've already seen a
\hyperref[free-monoids]{universal
construction}, which showed that every monoid can be obtained from an
appropriate free monoid by identifying a subset of morphisms. So a
single free monoid already generalizes a whole lot of monoids. There
are, however, infinitely many free monoids. The Lawvere theory for
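
It may help to spell out what a model of the theory of monoids looks like
from the programming side; the sketch below is only an analogy (a carrier
with operations rather than a product-preserving functor), and uses a fresh
class name to avoid the Prelude's \code{Monoid}:

\begin{snip}{haskell}
-- A model sends the generating object 1 to a carrier m, the object n to
-- the n-fold product of m, and the "interesting" morphisms n -> 1 of the
-- theory to honest n-ary operations. Two generators are enough:
class MonoidModel m where
  unit :: m            -- image of the 0-ary operation 0 -> 1
  mult :: m -> m -> m  -- image of the binary operation 2 -> 1

-- One particular model: lists under concatenation.
instance MonoidModel [a] where
  unit = []
  mult = (++)
\end{snip}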
@@ -320,7 +320,7 @@ $\cat{Mon}$.
As you may remember, algebraic theories can be described using monads
--- in particular
\hyperref[algebras-for-monads]{algebras
for monads}. It should be no surprise then that there is a connection
between Lawvere theories and monads.

First, let's see how a Lawvere theory induces a monad. It does it
@@ -366,7 +366,7 @@ a monad.

It turns out that the category of
\hyperref[algebras-for-monads]{algebras
for this monad} is equivalent to the category of models.

You may recall that monad algebras define ways to evaluate expressions
that are formed using monads. A Lawvere theory defines n-ary operations
@@ -428,8 +428,8 @@ The lifting simply selects $m$ elements from a tuple of $n$ elements\\
$(a_1, a_2,...a_n)$ (possibly with repetitions).

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/liftpower.png}
\end{figure}

\noindent
@@ -462,11 +462,11 @@ morphism with $\cat{L}(m, 1)$ gives us a subset of
$\cat{L}(n, 1)$.

\begin{figure}[H]
\centering
\begin{tikzcd}[column sep=large]
\cat{L}(m, 1) \arrow[r] & \cat{L}(n, 1)\\
{}^m \bullet \arrow[r, "f"'] & \bullet^n
\end{tikzcd}
\end{figure}

\noindent
@@ -485,24 +485,24 @@ Here, the coend starts as the disjoint sum of sets
$a^n \times \cat{L}(n, 1)$ over all $n$s. The identifications can
be generated by expressing the
\hyperref[ends-and-coends]{coend as
a coequalizer}. We start with an off-diagonal term
$a^n \times \cat{L}(m, 1)$. To get to the diagonal, we can apply a
morphism $f \Colon m \to n$ either to the first or
the second component of the product. The two results are then
identified.

\begin{figure}[H]
\centering
\begin{tikzcd}
& a^n \times \cat{L}(m, 1)
\arrow[dl, "\langle f {,} \id \rangle"']
\arrow[dr, "\langle \id {,} f \rangle"]
& \\
a^m \times \cat{L}(m, 1)
& \scalebox{2.5}[1]{\sim}
& a^n \times \cat{L}(n, 1) \\
& f \Colon m \to n &
\end{tikzcd}
\end{figure}

\noindent
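
The challenges below state that the Lawvere theory of monoids generates the
list monad; the following sketch merely restates that claim in code, with
hypothetical names, rather than deriving it from the coend:

\begin{snip}{haskell}
-- An element of the coend for the theory of monoids is an n-ary operation
-- together with n arguments of type a; after the identifications all that
-- survives is the list of arguments.
newtype TMon a = TMon [a]

etaMon :: a -> TMon a              -- induced by the unary identity operation
etaMon x = TMon [x]

muMon :: TMon (TMon a) -> TMon a   -- flattening nested formal products
muMon (TMon xss) = TMon (concat [xs | TMon xs <- xss])
\end{snip}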
@@ -575,17 +575,17 @@ the coend formula, because they can be obtained from:
by lifting $0 \to n$ in two different ways.

\begin{figure}[H]
\centering
\begin{tikzcd}
& a^n \times \cat{L}(0, 1)
\arrow[dl, "\langle f {,} \id \rangle"']
\arrow[dr, "\langle \id {,} f \rangle"]
& \\
a^0 \times \cat{L}(0, 1)
& \scalebox{2.5}[1]{\sim}
& a^n \times \cat{L}(n, 1) \\
& f \Colon 0 \to n &
\end{tikzcd}
\end{figure}

\noindent
@@ -603,18 +603,18 @@ not their handling.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Enumerate all morphisms between $2$ and $3$ in $\cat{F}$ (the skeleton of
$\cat{FinSet}$).
\item
Show that the category of models for the Lawvere theory of monoids is
equivalent to the category of monad algebras for the list monad.
\item
The Lawvere theory of monoids generates the list monad. Show that its
binary operations can be generated using the corresponding Kleisli
arrows.
\item
\textbf{FinSet} is a subcategory of $\Set$ and there is a
functor that embeds it in $\Set$. Any functor on $\Set$
can be restricted to $\cat{FinSet}$. Show that a finitary functor is
@@ -47,9 +47,9 @@ are called $0$-cells, morphisms are $1$-cells, and morphisms between
morphisms are $2$-cells.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/twocat.png}
\caption{$0$-cells $a, b$; $1$-cells $f, g$; and a $2$-cell $\alpha$.}
\end{figure}

\noindent
@@ -99,10 +99,10 @@ words, there is a $2$-cell:
that has an inverse.

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/bicat.png}
\caption{Identity law in a bicategory holds up to isomorphism (an invertible
$2$-cell $\rho$).}
\end{figure}

\noindent
@@ -117,13 +117,13 @@ An interesting example of a bicategory is the category of spans. A span
between two objects $a$ and $b$ is an object $x$
and a pair of morphisms:
\begin{gather*}
f \Colon x \to a \\
g \Colon x \to b
\end{gather*}

\begin{figure}[H]
\centering
\includegraphics[width=0.35\textwidth]{images/span.png}
\end{figure}

\noindent
@@ -132,13 +132,13 @@ product. Here, we want to look at spans as $1$-cells in a bicategory. The
first step is to define a composition of spans. Suppose that we have an
adjoining span:
\begin{gather*}
f' \Colon y \to b \\
g' \Colon y \to c
\end{gather*}

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/compspan.png}
\end{figure}

\noindent
@@ -147,16 +147,16 @@ most natural choice for it is the pullback of $g$ along
$f'$. Remember that a pullback is the object $z$
together with two morphisms:
\begin{align*}
-h &\Colon z \to x \\
-h' &\Colon z \to y
+h & \Colon z \to x \\
+h' & \Colon z \to y
\end{align*}
such that:
\[g \circ h = f' \circ h'\]
which is universal among all such objects.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/pullspan.png}
\end{figure}

\noindent
@@ -169,9 +169,9 @@ a morphism $h$ between their apices, such that the appropriate
triangles commute.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/morphspan.png}
\caption{A $2$-cell in $\cat{Span}$.}
\end{figure}

\noindent
@@ -206,8 +206,8 @@ we get:
\[\mu \Colon T \circ T \to T\]

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/monad.png}
\end{figure}

\noindent
@@ -221,14 +221,14 @@ As we've seen earlier, the hom-category $\cat{C}(a, a)$ is a monoidal
category. We can therefore define a monoid in $\cat{C}(a, a)$ by
picking a $1$-cell, $T$, and two $2$-cells:
\begin{align*}
-\eta &\Colon I \to T \\
-\mu &\Colon T \circ T \to T
+\eta & \Colon I \to T \\
+\mu & \Colon T \circ T \to T
\end{align*}
satisfying the monoid laws. We call \emph{this} a monad.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/bimonad.png}
\end{figure}

\noindent
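
Specialized to $\cat{Cat}$ and to Haskell endofunctors, the two $2$-cells are
the familiar \code{return} and \code{join}; here is a quick illustrative
instance for the list functor (names are ad hoc):

\begin{snip}{haskell}
-- eta: the unit 2-cell, one component per type a
etaList :: a -> [a]
etaList x = [x]

-- mu: the multiplication 2-cell, collapsing T . T down to T
muList :: [[a]] -> [a]
muList = concat

-- The monoid laws become the monad laws, e.g.
--   muList . etaList      = id
--   muList . fmap etaList = id
--   muList . muList       = muList . fmap muList
\end{snip}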
@@ -243,13 +243,13 @@ $Ob$. Next, we pick an endo-$1$-cell: a span from $Ob$ back
to $Ob$. It has a set at the apex, which I will call $Ar$,
equipped with two functions:
\begin{align*}
-dom &\Colon Ar \to Ob \\
-cod &\Colon Ar \to Ob
+dom & \Colon Ar \to Ob \\
+cod & \Colon Ar \to Ob
\end{align*}

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/spanmonad.png}
\end{figure}

\noindent
@@ -266,13 +266,13 @@ $Ob$ and $Ar$. In other words, $\eta$ assigns an
``arrow'' to every ``object.'' A $2$-cell in $\cat{Span}$ must satisfy
commutation conditions --- in this case:
\begin{align*}
-dom &\circ \eta = \id \\
-cod &\circ \eta = \id
+dom & \circ \eta = \id \\
+cod & \circ \eta = \id
\end{align*}

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/spanunit.png}
\end{figure}

\noindent
@@ -292,8 +292,8 @@ We say that $a_1$ and $a_2$ are ``composable,'' because the
domain of one is the codomain of the other.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/spanmul.png}
\end{figure}

\noindent
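
The punchline that a monad in $\cat{Span}$ is a category can be paraphrased
as a data structure; the following is a loose sketch (the field names and the
use of \code{Maybe} for partial composition are my own choices, not a
faithful encoding of the pullback):

\begin{snip}{haskell}
-- A small category presented as the data of a monad in Span:
-- objects, arrows, source/target maps, the unit eta (identities), and the
-- multiplication mu (composition, defined only on composable pairs).
data SmallCat ob ar = SmallCat
  { src      :: ar -> ob
  , tgt      :: ar -> ob
  , identity :: ob -> ar              -- eta
  , compose  :: ar -> ar -> Maybe ar  -- mu; Nothing when the pair isn't composable
  }
\end{snip}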
@ -319,16 +319,16 @@ all of mathematics, this is a very humbling realization.
|
|||||||
\section{Challenges}
|
\section{Challenges}
|
||||||
|
|
||||||
\begin{enumerate}
|
\begin{enumerate}
|
||||||
\tightlist
|
\tightlist
|
||||||
\item
|
\item
|
||||||
Derive unit and associativity laws for the tensor product defined as
|
Derive unit and associativity laws for the tensor product defined as
|
||||||
composition of endo-$1$-cells in a bicategory.
|
composition of endo-$1$-cells in a bicategory.
|
||||||
\item
|
\item
|
||||||
Check that monad laws for a monad in $\cat{Span}$ correspond to
|
Check that monad laws for a monad in $\cat{Span}$ correspond to
|
||||||
identity and associativity laws in the resulting category.
|
identity and associativity laws in the resulting category.
|
||||||
\item
|
\item
|
||||||
Show that a monad in $\cat{Prof}$ is an identity-on-objects functor.
|
Show that a monad in $\cat{Prof}$ is an identity-on-objects functor.
|
||||||
\item
|
\item
|
||||||
What's a monad algebra for a monad in $\cat{Span}$?
|
What's a monad algebra for a monad in $\cat{Span}$?
|
||||||
\end{enumerate}
|
\end{enumerate}
|
||||||
|
|
||||||
|
@@ -36,8 +36,8 @@ $L \circ R$; and two possible identity functors: one in $\cat{C}$
and another in $\cat{D}$.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/adj-1.jpg}
\end{figure}

\noindent
@@ -78,20 +78,20 @@ between $L \circ R$ and the identity functor $I_{\cat{C}}$.
Adjunction is even weaker than equivalence, because it doesn't require
that the composition of the two functors be \emph{isomorphic} to the
identity functor. Instead it stipulates the existence of a \newterm{one
way} natural transformation from $I_{\cat{D}}$ to $R \circ L$, and
another from $L \circ R$ to $I_{\cat{C}}$. Here are the signatures of
these two natural transformations:
\begin{gather*}
\eta \Colon I_{\cat{D}} \to R \circ L \\
\varepsilon \Colon L \circ R \to I_{\cat{C}}
\end{gather*}
$\eta$ is called the unit, and $\varepsilon$ the counit of the adjunction.

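To make the unit and counit concrete, here is a minimal Haskell sketch of an
adjunction, specialized to endofunctors on the category of types (the class
name and method names are assumptions for illustration, not the book's
definitions):
\begin{minted}{haskell}
{-# LANGUAGE MultiParamTypeClasses #-}

-- eta (unit) and epsilon (counit) of an adjunction l -| r:
class (Functor l, Functor r) => Adjunction l r where
  unit   :: a -> r (l a)      -- eta     :: I  ~> R . L
  counit :: l (r a) -> a      -- epsilon :: L . R ~> I
\end{minted}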
Notice the asymmetry between these two definitions. In general, we don't
have the two remaining mappings:
\begin{gather*}
R \circ L \to I_{\cat{D}} \quad\quad\text{not necessarily} \\
I_{\cat{C}} \to L \circ R \quad\quad\text{not necessarily}
\end{gather*}
Because of this asymmetry, the functor $L$ is called the
\newterm{left adjoint} to the functor $R$, while the functor
@@ -104,8 +104,8 @@ To better understand the adjunction, let's analyze the unit and the
counit in more detail.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/adj-unit.jpg}
\end{figure}

\noindent
@@ -123,8 +123,8 @@ $R \circ L$ to pick our target object $d'$. Then we
shoot an arrow --- the morphism $\eta_d$ --- to our target.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/adj-counit.jpg}
\end{figure}

\noindent
@@ -144,8 +144,8 @@ identity on $\cat{C}$. That leads to some ``obvious'' consistency
conditions, which make sure that introduction followed by elimination
doesn't change anything:
\begin{gather*}
L = L \circ I_{\cat{D}} \to L \circ R \circ L \to I_{\cat{C}} \circ L = L \\
R = I_{\cat{D}} \circ R \to R \circ L \circ R \to R \circ I_{\cat{C}} = R
\end{gather*}
These are called triangular identities because they make the following
diagrams commute:
@@ -177,8 +177,8 @@ These are diagrams in the functor category: the arrows are natural
transformations, and their composition is the horizontal composition of
natural transformations. In components, these identities become:
\begin{gather*}
\varepsilon_{L d} \circ L \eta_d = \id_{L d} \\
R \varepsilon_{c} \circ \eta_{R c} = \id_{R c}
\end{gather*}
We often see unit and counit in Haskell under different names. Unit is
known as \code{return} (or \code{pure}, in the definition of
@@ -254,8 +254,8 @@ objects exists in a category, it can be also defined through an
adjunction.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/adj-homsets.jpg}
\end{figure}

\noindent
@@ -281,14 +281,14 @@ precisely, we have a natural transformation $\varphi$ between the
following two (covariant) functors from $\cat{C}$ to $\Set$. Here's
the action of these functors on objects:
\begin{gather*}
c \to \cat{C}(L d, c) \\
c \to \cat{D}(d, R c)
\end{gather*}
The other natural transformation, $\psi$, acts between the following
(contravariant) functors:
\begin{gather*}
d \to \cat{C}(L d, c) \\
d \to \cat{D}(d, R c)
\end{gather*}
Both natural transformations must be invertible.

@@ -345,7 +345,7 @@ as an exercise.

We are now ready to explain why, in Haskell, the right adjoint is
automatically a \hyperref[representable-functors]{representable
functor}. The reason for this is that, to the first approximation, we
can treat the category of Haskell types as the category of sets.

When the right category $\cat{D}$ is $\Set$, the right adjoint
@@ -373,7 +373,7 @@ We have previously introduced several concepts using universal
constructions. Many of those concepts, when defined globally, are easier
to express using adjunctions. The simplest non-trivial example is that
of the product. The gist of the \hyperref[products-and-coproducts]{universal
construction of the product} is the ability to factorize any
product-like candidate through the universal product.

More precisely, the product of two objects $a$ and $b$ is
@@ -411,8 +411,8 @@ product category $\cat{C}\times{}\cat{C}$. Pairs of morphism from $\cat{C}$ are
morphisms in the product category $\cat{C}\times{}\cat{C}$.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/adj-productcat.jpg}
\end{figure}

\noindent
@@ -439,8 +439,8 @@ first hom-set is in the product category $\cat{C}\times{}\cat{C}$, and the secon
in $\cat{C}$. A general morphism in $\cat{C}\times{}\cat{C}$ would be a pair of
morphisms $\langle f, g \rangle$:
\begin{gather*}
f \Colon c' \to a \\
g \Colon c'' \to b
\end{gather*}
with $c''$ potentially different from
$c'$. But to define a product, we are interested in a
@@ -466,8 +466,8 @@ $a\times{}b$. We recognize this element of the hom-set as the
\[\ldots{} \to (c \to (a, b))\]
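The factorizing property that this hom-set mapping encodes is easy to spell
out in Haskell. A minimal sketch (the function names are mine, chosen for
illustration):
\begin{minted}{haskell}
-- A pair of morphisms out of c determines a unique morphism into the
-- product, and vice versa (post-composing with the projections):
factorizer :: (c -> a) -> (c -> b) -> (c -> (a, b))
factorizer p q = \c -> (p c, q c)

unfactorize :: (c -> (a, b)) -> (c -> a, c -> b)
unfactorize h = (fst . h, snd . h)
\end{minted}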

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/adj-product.jpg}
\end{figure}

\noindent
@@ -517,13 +517,13 @@ for instance:

The exponential $b^a$, or the function object $a \Rightarrow b$, can be
defined using a \hyperref[function-types]{universal
construction}. This construction, if it exists for all pairs of objects,
can be seen as an adjunction. Again, the trick is to concentrate on the
statement:

\begin{quote}
For any other object $z$ with a morphism $g \Colon z\times{}a \to b$
there is a unique morphism $h \Colon z \to (a \Rightarrow b)$
\end{quote}
This statement establishes a mapping between hom-sets.
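In Haskell, this mapping between hom-sets is the familiar currying
isomorphism; the standard \code{curry} and \code{uncurry} are rewritten here
just to make the two directions explicit:
\begin{minted}{haskell}
-- from morphisms (z, a) -> b to morphisms z -> (a -> b), and back:
curry' :: ((z, a) -> b) -> (z -> (a -> b))
curry' g = \z a -> g (z, a)

uncurry' :: (z -> (a -> b)) -> ((z, a) -> b)
uncurry' h = \(z, a) -> h z a
\end{minted}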

@@ -541,13 +541,13 @@ The mapping of hom-sets that underlies this adjunction is best seen by
redrawing the diagram that we used in the universal construction.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/adj-expo.jpg}
\end{figure}

\noindent
Notice that the $eval$ morphism\footnote{See ch.9 on \hyperref[function-types]{universal
construction}.} is nothing else but the counit of
this adjunction:
\[(a \Rightarrow b)\times{}a \to b\]
where:
@@ -560,26 +560,26 @@ a functor has an adjoint, this adjoint is unique up to isomorphism.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Derive the naturality square for $\psi$, the transformation
between the two (contravariant) functors:
\begin{gather*}
a \to \cat{C}(L a, b) \\
a \to \cat{D}(a, R b)
\end{gather*}
\item
Derive the counit $\varepsilon$ starting from the hom-sets isomorphism in
the second definition of the adjunction.
\item
Complete the proof of equivalence of the two definitions of the
adjunction.
\item
Show that the coproduct can be defined by an adjunction. Start with
the definition of the factorizer for a coproduct.
\item
Show that the coproduct is the left adjoint of the diagonal functor.
\item
Define the adjunction between a product and a function object in
Haskell.
\end{enumerate}
@@ -2,7 +2,7 @@

\lettrine[lhang=0.17]{F}{ree constructions are} a powerful application of adjunctions. A
\newterm{free functor} is defined as the left adjoint to a \newterm{forgetful
functor}. A forgetful functor is usually a pretty simple functor that
forgets some structure. For instance, lots of interesting categories are
built on top of sets. But categorical objects, which abstract those
sets, have no internal structure --- they have no elements. Still, those
@@ -27,10 +27,10 @@ morphism in $\cat{Mon}$.\\
Things to keep in mind:

\begin{itemize}
\tightlist
\item
There may be many monoids that map to the same set, and
\item
There are fewer (or at most as many as) monoid morphisms than there
are functions between their underlying sets.
\end{itemize}
@@ -40,12 +40,12 @@ The functor $F$ that's the left adjoint to the forgetful functor
$U$ is the free functor that builds free monoids from their
generator sets. The adjunction follows from the free monoid
universal construction we've discussed before.\footnote{See ch.13 on
\hyperref[free-monoids]{free monoids}.}

\begin{figure}[H]
\centering
\includegraphics[width=0.6\textwidth]{images/forgetful.jpg}
\caption{Monoids $m_1$ and $m_2$ have the same
underlying set. There are more functions between the underlying sets of
$m_2$ and $m_3$ than there are morphisms
between them.}
@@ -57,14 +57,14 @@ In terms of hom-sets, we can write this adjunction as:
This (natural in $x$ and $m$) isomorphism tells us that:

\begin{itemize}
\tightlist
\item
For every monoid homomorphism between the free monoid $F x$
generated by $x$ and an arbitrary monoid $m$ there is a
unique function that embeds the set of generators $x$ in the
underlying set of $m$. It's a function in
$\Set(x, U m)$.
\item
For every function that embeds $x$ in the underlying set of
some $m$ there is a unique monoid morphism between the free
monoid generated by $x$ and the monoid $m$. (This is the
@@ -72,8 +72,8 @@ This (natural in $x$ and $m$) isomorphism tells us that:
\end{itemize}

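For Haskell lists (the free monoid, modulo the caveats mentioned below), this
isomorphism of hom-sets can be sketched directly. The function names here are
mine; \code{extend} is just \code{foldMap}:
\begin{minted}{haskell}
import Data.Monoid (Sum (..))

-- Set(x, U m) -> Mon(F x, m): extend a function on generators to a
-- monoid homomorphism out of the list (free) monoid:
extend :: Monoid m => (a -> m) -> ([a] -> m)
extend f = mconcat . map f

-- Mon(F x, m) -> Set(x, U m): restrict a homomorphism to singletons:
restrict :: ([a] -> m) -> (a -> m)
restrict h a = h [a]

-- Example: embedding Int generators as Sum Int yields list summation.
total :: [Int] -> Int
total = getSum . extend Sum
\end{minted}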
\begin{figure}[H]
\centering
\includegraphics[width=0.8\textwidth]{images/freemonadjunction.jpg}
\end{figure}

\noindent
@@ -90,7 +90,7 @@ versa.

In Haskell, the list data structure is a free monoid (with some caveats:
see \urlref{http://comonad.com/reader/2015/free-monoids-in-haskell/}{Dan
Doel's blog post}). A list type \code{{[}a{]}} is a free monoid with
the type \code{a} representing the set of generators. For instance,
the type \code{{[}Char{]}} contains the unit element --- the empty
list \code{{[}{]}} --- and the singletons like
@@ -174,8 +174,8 @@ $\cat{C}(c', c)$ have to preserve the additional structure,
whereas the ones in $\cat{D}(U c', U c)$ don't.

\begin{figure}[H]
\centering
\includegraphics[width=0.45\textwidth]{images/forgettingmorphisms.jpg}
\end{figure}

\noindent
@@ -195,8 +195,8 @@ preserve by morphisms). Such ``structure-free'' objects are called free
objects.

\begin{figure}[H]
\centering
\includegraphics[width=0.45\textwidth]{images/freeimage.jpg}
\end{figure}

\noindent
@@ -209,7 +209,7 @@ there is no identification of ${[}2, 3{]}$ and $6$, a morphism from this
free monoid to any other monoid $m$ is allowed to map them
separately. But it's also okay for it to map both ${[}2, 3{]}$ and $6$
(their product) to the same element of $m$. Or to identify ${[}2,
3{]}$ and $5$ (their sum) in an additive monoid, and so on. Different
identifications give you different monoids.

This leads to another interesting intuition: Free monoids, instead of
@@ -234,8 +234,8 @@ intermediate results.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Consider a free monoid built from a singleton set as its generator.
Show that there is a one-to-one correspondence between morphisms from
this free monoid to any monoid $m$, and functions from the
@@ -21,18 +21,18 @@ tape) and its applications. Here's a little sample of things that you
can do with it:

\begin{itemize}
\tightlist
\item
sealing ducts
\item
fixing CO\textsubscript{2} scrubbers on board Apollo 13
\item
wart treatment
\item
fixing Apple's iPhone 4 dropped call issue
\item
making a prom dress
\item
building a suspension bridge
\end{itemize}

@@ -83,7 +83,7 @@ the first place.

We have previously arrived at the
\hyperref[kleisli-categories]{writer
monad} by embellishing regular functions. The particular embellishment
was done by pairing their return values with strings or, more generally,
with elements of a monoid. We can now recognize that such an embellishment
is a functor:
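The embellishment in question pairs a result with a monoid value; a minimal
sketch of such a functor (the chapter's own definition may differ in naming
details):
\begin{minted}{haskell}
-- pair a result with a monoid value, and map over the result only:
newtype Writer w a = Writer (a, w)

instance Functor (Writer w) where
  fmap f (Writer (a, w)) = Writer (f a, w)
\end{minted}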
@@ -311,7 +311,7 @@ Interestingly, the equivalent of the \code{do} notation has found its
application in imperative languages, C++ in particular. I'm talking
about resumable functions or coroutines. It's not a secret that C++
\urlref{https://bartoszmilewski.com/2014/02/26/c17-i-see-a-monad-in-your-future/}{futures
form a monad}. It's an example of the continuation monad, which we'll
discuss shortly. The problem with continuations is that they are very
hard to compose. In Haskell, we use the \code{do} notation to turn the
spaghetti of ``my handler will call your handler'' into something that
@@ -319,7 +319,7 @@ looks very much like sequential code. Resumable functions make the same
transformation possible in C++. And the same mechanism can be applied to
turn the
\urlref{https://bartoszmilewski.com/2014/04/21/getting-lazy-with-c/}{spaghetti
of nested loops} into list comprehensions or ``generators,'' which are
essentially the \code{do} notation for the list monad. Without the
unifying abstraction of the monad, each of these problems is typically
addressed by providing custom extensions to the language. In Haskell,
@@ -13,16 +13,16 @@ functions.

Here is a short list of similar problems, copied from
\urlref{https://core.ac.uk/download/pdf/21173011.pdf}{Eugenio Moggi's
seminal paper}, all of which are traditionally solved by abandoning the
purity of functions.

\begin{itemize}
\tightlist
\item
Partiality: Computations that may not terminate
\item
Nondeterminism: Computations that may return many results
\item
Side effects: Computations that access/modify state

\begin{itemize}
@@ -34,14 +34,14 @@ purity of functions.
\item
Read/write state
\end{itemize}
\item
Exceptions: Partial functions that may fail
\item
Continuations: Ability to save state of the program and then restore
it on demand
\item
Interactive Input
\item
Interactive Output
\end{itemize}

@@ -102,7 +102,7 @@ Haskell (lifted) types and functions rather than the simpler
$\Set$. It is not clear, though, that $\Hask$ is a real
category (see this
\urlref{http://math.andrej.com/2016/08/06/hask-is-not-a-category/}{Andrej
Bauer post}).

\subsection{Nondeterminism}

@@ -134,7 +134,7 @@ From the programmer's point of view, working with a list is easier than,
for instance, calling a non-deterministic function in a loop, or
implementing a function that returns an iterator (although,
\urlref{http://ericniebler.com/2014/04/27/range-comprehensions/}{in modern
C++}, returning a lazy range would be almost equivalent to returning a
list in Haskell).
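As a tiny illustration of nondeterminism-as-a-list, here is the list monad
enumerating all combinations of two independent choices (plain Haskell, not
one of the book's snippets):
\begin{minted}{haskell}
pairs :: [(Int, Char)]
pairs = do
  x <- [1, 2, 3]   -- a nondeterministic choice of a number
  y <- "ab"        -- a nondeterministic choice of a character
  return (x, y)
-- pairs == [(1,'a'),(1,'b'),(2,'a'),(2,'b'),(3,'a'),(3,'b')]
\end{minted}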

A good example of using non-determinism creatively is in game
@@ -15,8 +15,8 @@ like $1$ or $2$, bound together with operators like plus or times. As
programmers, we often think of expressions as trees.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/exptree.png}
\end{figure}

\noindent
@@ -79,8 +79,8 @@ implemented using $\mu$:
\[g \circ_T f = \mu_c \circ (T\ g) \circ f\]
where
\begin{gather*}
f \Colon a \to T\ b \\
g \Colon b \to T\ c
\end{gather*}
Here $T$, being a functor, can be applied to the morphism
$g$. It might be easier to recognize this formula in Haskell
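Indeed, with \code{join} standing in for $\mu$ and \code{fmap} for the action
of $T$ on morphisms, the formula transliterates almost verbatim (a sketch; the
standard library's \code{<=<} from \code{Control.Monad} computes the same
thing):
\begin{minted}{haskell}
import Control.Monad (join)

-- composeK g f implements: mu . (T g) . f
composeK :: Monad t => (b -> t c) -> (a -> t b) -> (a -> t c)
composeK g f = join . fmap g . f
\end{minted}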
@@ -114,8 +114,8 @@ horizontal composition of two natural transformations:
\[I_T \circ \mu\]

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/assoc1.png}
\end{figure}

\noindent
@@ -131,8 +131,8 @@ must mean $I_T$ in this context.
We can also draw the diagram in the (endo-) functor category ${[}\cat{C}, \cat{C}{]}$:

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/assoc2.png}
\end{figure}

\noindent
@@ -142,8 +142,8 @@ $T \circ T$ which, again, can be reduced to $T$ using $\mu$. We
require that the two paths produce the same result.

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/assoc.png}
\end{figure}

\noindent
@@ -155,8 +155,8 @@ transformation directly to \code{T}. And, by analogy, the same should
be true for $T \circ \eta$.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/unitlawcomp-1.png}
\end{figure}

\noindent
@@ -275,8 +275,8 @@ up to isomorphism. The associator and the two unitors are natural
isomorphisms. The laws can be represented by commuting diagrams.

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/assocmon.png}
\end{figure}

\noindent
@@ -312,9 +312,9 @@ and a distinct object $i$ called the unit object, together with
three natural isomorphisms called, respectively, the associator and the
left and right unitors:
\begin{align*}
-\alpha_{a b c} &\Colon (a \otimes b) \otimes c \to a \otimes (b \otimes c) \\
+\alpha_{a b c} & \Colon (a \otimes b) \otimes c \to a \otimes (b \otimes c) \\
-\lambda_a &\Colon i \otimes a \to a \\
+\lambda_a & \Colon i \otimes a \to a \\
-\rho_a &\Colon a \otimes i \to a
+\rho_a & \Colon a \otimes i \to a
\end{align*}
(There is also a coherence condition for simplifying a quadruple tensor
product.)
@@ -335,14 +335,14 @@ of $m$, but they are isomorphic through the associator. Similarly
for higher powers of $m$ (that's where we need the coherence
conditions). To form a monoid we need to pick two morphisms:
\begin{align*}
-\mu &\Colon m \otimes m \to m \\
+\mu & \Colon m \otimes m \to m \\
-\eta &\Colon i \to m
+\eta & \Colon i \to m
\end{align*}
where $i$ is the unit object for our tensor product.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/monoid-1.jpg}
\end{figure}

\noindent
@@ -350,13 +350,13 @@ These morphisms have to satisfy associativity and unit laws, which can
be expressed in terms of the following commuting diagrams:

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/assoctensor.jpg}
\end{figure}

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/unitmon.jpg}
\end{figure}

\noindent
@@ -395,8 +395,8 @@ transformations, and tensor products by composition, you get:
which you may recognize as the special case of horizontal composition.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/horizcomp.png}
\end{figure}

\noindent
@@ -411,19 +411,19 @@ What's a monoid in this category? It's an object --- that is an
endofunctor $T$; and two morphisms --- that is natural
transformations:
\begin{gather*}
\mu \Colon T \circ T \to T \\
\eta \Colon I \to T
\end{gather*}
Not only that, here are the monoid laws:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/assoc.png}
\end{figure}

\begin{figure}[H]
\centering
\includegraphics[width=0.5\textwidth]{images/unitlawcomp.png}
\end{figure}

\noindent
@@ -431,7 +431,7 @@ They are exactly the monad laws we've seen before. Now you understand
the famous quote from Saunders Mac Lane:

\begin{quote}
All told, monad is just a monoid in the category of endofunctors.
\end{quote}
You might have seen it emblazoned on some t-shirts at functional
programming conferences.
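For a Haskell monad \code{t}, the two structure maps of this monoid can be
written out with functor composition made explicit. A sketch using
\code{Compose} and \code{Identity} from \code{base}; the wrappers are only
there to make the objects of the endofunctor category visible:
\begin{minted}{haskell}
import Control.Monad (join)
import Data.Functor.Compose (Compose (..))
import Data.Functor.Identity (Identity (..))

-- mu :: T . T -> T
muT :: Monad t => Compose t t a -> t a
muT = join . getCompose

-- eta :: I -> T
etaT :: Monad t => Identity a -> t a
etaT = return . runIdentity
\end{minted}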
@@ -445,8 +445,8 @@ giving rise to two endofunctors, $R \circ L$ and $L \circ R$.
As per an adjunction, these endofunctors are related to identity
functors through two natural transformations called unit and counit:
\begin{gather*}
\eta \Colon I_{\cat{D}} \to R \circ L \\
\varepsilon \Colon L \circ R \to I_{\cat{C}}
\end{gather*}
Immediately we see that the unit of an adjunction looks just like the
unit of a monad. It turns out that the endofunctor $R \circ L$ is
@@ -467,8 +467,8 @@ because an adjunction usually involves two categories. However, the
definition of an exponential, or a function object, is an exception.
Here are the two endofunctors that form this adjunction:
\begin{gather*}
L\ z = z\times{}s \\
R\ b = s \Rightarrow b
\end{gather*}
You may recognize their composition as the familiar state monad:
\[R\ (L\ z) = s \Rightarrow (z\times{}s)\]
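Written as Haskell newtypes (a sketch; the wrappers and names are mine, not
the book's), the two functors and their composite look like this:
\begin{minted}{haskell}
newtype L s z = L (z, s)        -- L z = z x s
newtype R s b = R (s -> b)      -- R b = s -> b as an object

-- the composite R (L z), i.e. s -> (z, s), is the State functor:
newtype State s z = State (s -> (z, s))
\end{minted}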
@@ -226,8 +226,8 @@ duality. As with the monad, we start with an endofunctor \code{T}. The
two natural transformations, $\eta$ and $\mu$, that define the monad are simply
reversed for the comonad:
\begin{align*}
-\varepsilon &\Colon T \to I \\
+\varepsilon & \Colon T \to I \\
-\delta &\Colon T \to T^2
+\delta & \Colon T \to T^2
\end{align*}
The components of these transformations correspond to \code{extract}
and \code{duplicate}. Comonad laws are the mirror image of monad laws.
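A minimal sketch of the corresponding Haskell class (essentially the interface
of the \code{comonad} package, restated here for reference):
\begin{minted}{haskell}
class Functor w => Comonad w where
  extract   :: w a -> a         -- epsilon :: T -> I
  duplicate :: w a -> w (w a)   -- delta   :: T -> T^2
\end{minted}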
@@ -255,14 +255,14 @@ approach to a monad, we used a more general definition of a monoid as an
object in a monoidal category. The construction was based on two
morphisms:
\begin{align*}
-\mu &\Colon m \otimes m \to m \\
+\mu & \Colon m \otimes m \to m \\
-\eta &\Colon i \to m
+\eta & \Colon i \to m
\end{align*}
The reversal of these morphisms produces a comonoid in a monoidal
category:
\begin{align*}
-\delta &\Colon m \to m \otimes m \\
+\delta & \Colon m \to m \otimes m \\
-\varepsilon &\Colon m \to i
+\varepsilon & \Colon m \to i
\end{align*}
One can write a definition of a comonoid in Haskell:

@@ -280,7 +280,7 @@ Now consider comonoid laws that are dual to the monoid unit laws.
Here, \code{lambda} and \code{rho} are the left and right unitors,
respectively (see the definition of
\hyperref[monads-categorically]{monoidal
categories}). Plugging in the definitions, we get:

\src{snippet29}
which proves that \code{g = id}. Similarly, the second law expands
@@ -296,7 +296,7 @@ And it turns out that, just like the monad is a monoid in the category
of endofunctors,

\begin{quote}
The comonad is a comonoid in the category of endofunctors.
\end{quote}

\section{The Store Comonad}
@@ -307,8 +307,8 @@ It's called the costate comonad or, alternatively, the store comonad.
We've seen before that the state monad is generated by the adjunction
that defines the exponentials:
\begin{align*}
-L\ z &= z\times{}s \\
+L\ z & = z\times{}s \\
-R\ a &= s \Rightarrow a
+R\ a & = s \Rightarrow a
\end{align*}
We'll use the same adjunction to define the costate comonad. A comonad
is defined by the composition $L \circ R$:
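A quick sketch of that composite in Haskell (this matches the shape of the
\code{Store} type the chapter works with, though the book introduces it
through its own snippets):
\begin{minted}{haskell}
-- L (R a): a function s -> a paired with a value of s
data Store s a = Store (s -> a) s

instance Functor (Store s) where
  fmap g (Store f s) = Store (g . f) s

-- epsilon: run the accessor at the stored value
extractStore :: Store s a -> a
extractStore (Store f s) = f s

-- delta: re-package the whole store, keeping the focus
duplicateStore :: Store s a -> Store s (Store s a)
duplicateStore (Store f s) = Store (Store f) s
\end{minted}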
@@ -336,8 +336,8 @@ can be rewritten as partially applied data constructor:
\src{snippet35}
We construct $\delta$, or \code{duplicate}, as the horizontal composition:
\begin{align*}
-\delta &\Colon L \circ R \to L \circ R \circ L \circ R \\
+\delta & \Colon L \circ R \to L \circ R \circ L \circ R \\
-\delta &= L \circ \eta \circ R
+\delta & = L \circ \eta \circ R
\end{align*}
We have to sneak $\eta$ through the leftmost $L$, which is the
\code{Product} functor. It means acting with $\eta$, or \code{Store f}, on
@@ -393,8 +393,8 @@ could be implemented to read the value of the \code{s} field from
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Implement Conway's Game of Life using the \code{Store} comonad.
Hint: What type do you pick for \code{s}?
\end{enumerate}
@@ -7,8 +7,8 @@ more juice can we squeeze out of this simple concept?
Let's try. Take this definition of a monoid as a set $m$ with a
pair of functions:
\begin{align*}
-\mu &\Colon m\times{}m \to m \\
+\mu & \Colon m\times{}m \to m \\
-\eta &\Colon 1 \to m
+\eta & \Colon 1 \to m
\end{align*}
Here, 1 is the terminal object in $\Set$ --- the singleton set.
The first function defines multiplication (it takes a pair of elements
@@ -20,8 +20,8 @@ and just consider ``potential monoids.'' A pair of functions is an
element of a Cartesian product of two sets of functions. We know that
these sets may be represented as exponential objects:
\begin{align*}
-\mu &\in m^{m\times{}m} \\
+\mu & \in m^{m\times{}m} \\
-\eta &\in m^1
+\eta & \in m^1
\end{align*}
The Cartesian product of these two sets is:
\[m^{m\times{}m}\times{}m^1\]
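Since exponentials multiply when their exponents are added, this product is
the same thing as a single function out of a sum, $m^{m\times{}m + 1}$. In
Haskell the repackaging is straightforward (a sketch, with an assumed name):
\begin{minted}{haskell}
-- one function out of a sum in place of the pair (mu, eta):
muEta :: Monoid m => Either (m, m) () -> m
muEta (Left (a, b)) = mappend a b   -- mu  :: m x m -> m
muEta (Right ())    = mempty        -- eta :: 1 -> m
\end{minted}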
@@ -42,9 +42,9 @@ $m \to m$. As an example, integers form a group with
addition as a binary operation, zero as the unit, and negation as the
inverse. To define a group we would start with a triple of functions:
\begin{align*}
m\times{}m \to m \\
m \to m \\
1 \to m
\end{align*}
As before, we can combine all these triples into one set of functions:
\[m\times{}m + m + 1 \to m\]
@@ -94,7 +94,7 @@ In the monoid example, the functor in question is:
\src{snippet02}
This is Haskell for $1 + a\times{}a$ (remember
\hyperref[simple-algebraic-data-types]{algebraic
data structures}).

A ring would be defined using the following functor:

@@ -159,7 +159,7 @@ Of course, this is a hand-waving argument, and I'll make it more
rigorous later.

Applying an endofunctor infinitely many times produces a \newterm{fixed
point}, an object defined as:
\[Fix\ f = f\ (Fix\ f)\]
The intuition behind this definition is that, since we applied
$f$ infinitely many times to get $Fix\ f$, applying it one
@@ -208,8 +208,8 @@ be equal:
\[g \circ F\ m = m \circ f\]

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/alg.png}
\end{figure}

\noindent
@@ -227,8 +227,8 @@ $m$ from it to any other F-algebra. Since $m$ is a
homomorphism, the following diagram must commute:

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/alg2.png}
\end{figure}

\noindent
@@ -242,8 +242,8 @@ homomorphism $m$ from it to $(F\ i, F\ j)$. The following
diagram must commute:

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/alg3a.png}
\end{figure}

\noindent
@@ -251,8 +251,8 @@ But we also have this trivially commuting diagram (both paths are the
same!):

\begin{figure}[H]
\centering
\includegraphics[width=0.3\textwidth]{images/alg3.png}
\end{figure}

\noindent
@@ -261,8 +261,8 @@ algebras, mapping $(F\ i, F\ j)$ to $(i, j)$. We can
glue these two diagrams together to get:

\begin{figure}[H]
\centering
\includegraphics[width=0.6\textwidth]{images/alg4.png}
\end{figure}

\noindent
@@ -292,8 +292,8 @@ the fixed point does not depend on $a$.
Natural numbers can also be defined as an F-algebra. The starting point
is the pair of morphisms:
\begin{align*}
-zero &\Colon 1 \to N \\
+zero & \Colon 1 \to N \\
-succ &\Colon N \to N
+succ & \Colon N \to N
\end{align*}
The first one picks the zero, and the second one maps all numbers to
their successors. As before, we can combine the two into one:
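A hedged sketch of that combination in Haskell: the functor below encodes
$1 + N$ in the carrier, and the evaluator is just one example of an algebra
over it (the book's own snippet may use different names):
\begin{minted}{haskell}
-- 1 + N, as a functor in the carrier n:
data NatF n = ZeroF | SuccF n

-- an algebra with carrier Int: interpret ZeroF as 0 and SuccF as (+1)
natAlg :: NatF Int -> Int
natAlg ZeroF     = 0
natAlg (SuccF n) = n + 1
\end{minted}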
@@ -318,8 +318,8 @@ algebra to any other algebra over the same functor. Let's pick an
algebra whose carrier is \code{a} and the evaluator is \code{alg}.

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/alg5.png}
\end{figure}

\noindent
@@ -332,8 +332,8 @@ isomorphism. We called its inverse \code{unFix}. We can therefore flip
one arrow in this diagram to get:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/alg6.png}
\end{figure}

\noindent
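Reading the flipped diagram off as Haskell gives the catamorphism. A
self-contained sketch (the definitions of \code{Fix} and \code{unFix} are
repeated so the snippet stands on its own):
\begin{minted}{haskell}
newtype Fix f = Fix (f (Fix f))

unFix :: Fix f -> f (Fix f)
unFix (Fix x) = x

-- follow the diagram: unFix, then recurse under f, then evaluate
cata :: Functor f => (f a -> a) -> Fix f -> a
cata alg = alg . fmap (cata alg) . unFix
\end{minted}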
@@ -431,8 +431,8 @@ coalgebra. For every other algebra $(a, f)$ there is a unique
homomorphism $m$ that makes the following diagram commute:

\begin{figure}[H]
\centering
\includegraphics[width=0.4\textwidth]{images/alg7.png}
\end{figure}

\noindent
@@ -523,21 +523,21 @@ with the comonad structure. We'll talk about this in the next section.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
Implement the evaluation function for a ring of polynomials of one
variable. You can represent a polynomial as a list of coefficients in
front of powers of $x$. For instance, $4x^2-1$ would be
represented as (starting with the zero'th power)
\code{{[}-1, 0, 4{]}}.
\item
Generalize the previous construction to polynomials of many
independent variables, like $x^2y-3y^3z$.
\item
Implement the algebra for the ring of $2\times{}2$ matrices.
\item
Define a coalgebra whose anamorphism produces a list of squares of
natural numbers.
\item
Use \code{unfoldr} to generate a list of the first $n$ primes.
\end{enumerate}
@ -7,14 +7,14 @@ but we can also answer a few interesting questions.
|
|||||||
|
|
||||||
One such question concerns the relation between monads and adjunctions.
|
One such question concerns the relation between monads and adjunctions.
|
||||||
As we've seen, every adjunction \hyperref[monads-categorically]{defines
|
As we've seen, every adjunction \hyperref[monads-categorically]{defines
|
||||||
a monad} (and a comonad). The question is: Can every monad (comonad) be
|
a monad} (and a comonad). The question is: Can every monad (comonad) be
|
||||||
derived from an adjunction? The answer is positive. There is a whole
|
derived from an adjunction? The answer is positive. There is a whole
|
||||||
family of adjunctions that generate a given monad. I'll show you two
|
family of adjunctions that generate a given monad. I'll show you two
|
||||||
such adjunctions.
|
such adjunctions.
|
||||||
|
|
||||||
\begin{figure}[H]
|
\begin{figure}[H]
|
||||||
\centering
|
\centering
|
||||||
\includegraphics[width=0.25\textwidth]{images/pigalg.png}
|
\includegraphics[width=0.25\textwidth]{images/pigalg.png}
|
||||||
\end{figure}
|
\end{figure}
|
||||||
|
|
||||||
\noindent
|
\noindent
|
||||||
@ -22,8 +22,8 @@ Let's review the definitions. A monad is an endofunctor $m$
|
|||||||
equipped with two natural transformations that satisfy some coherence
|
equipped with two natural transformations that satisfy some coherence
|
||||||
conditions. The components of these transformations at $a$ are:
|
conditions. The components of these transformations at $a$ are:
|
||||||
\begin{align*}
|
\begin{align*}
|
||||||
\eta_a &\Colon a \to m\ a \\
|
\eta_a & \Colon a \to m\ a \\
|
||||||
\mu_a &\Colon m\ (m\ a) \to m\ a
|
\mu_a & \Colon m\ (m\ a) \to m\ a
|
||||||
\end{align*}
|
\end{align*}
|
||||||
An algebra for the same endofunctor is a selection of a particular
|
An algebra for the same endofunctor is a selection of a particular
|
||||||
object --- the carrier $a$ --- together with the morphism:
|
object --- the carrier $a$ --- together with the morphism:
|
||||||
@ -130,8 +130,8 @@ evaluator.
We still have to show that this is a T-algebra. For that, two coherence
conditions must be satisfied:
\begin{align*}
-alg &\circ \eta_{Ta} = \id_{Ta} \\
-alg &\circ \mu_a = alg \circ T\ alg
+alg & \circ \eta_{Ta} = \id_{Ta} \\
+alg & \circ \mu_a = alg \circ T\ alg
\end{align*}
But these are just monadic laws, if you plug in $\mu$ for the
algebra.
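Spelled out in Haskell (an illustrative sketch with made-up names; the book's own snippets are not shown in this hunk): the free T-algebra has carrier m a and structure map join, and the two conditions become the familiar monad laws.

  import Control.Monad (join)

  -- Structure map of the free algebra: the carrier is (m a), alg = join.
  algFree :: Monad m => m (m a) -> m a
  algFree = join

  -- First condition:  alg . eta = id        ~  join . return = id
  lawUnit :: (Monad m, Eq (m a)) => m a -> Bool
  lawUnit x = (algFree . return) x == x

  -- Second condition: alg . mu = alg . T alg ~ join . join = join . fmap join
  lawMult :: (Monad m, Eq (m a)) => m (m (m a)) -> Bool
  lawMult x = (algFree . join) x == (algFree . fmap algFree) x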
@ -174,13 +174,13 @@ diagram that makes $f$ a T-algebra may be re-interpreted to show
that it's a homomorphism of T-algebras:

\begin{figure}[H]
\centering
\begin{tikzcd}[column sep=large, row sep=large]
T(Ta) \arrow[r, "T f"] \arrow[d, "\mu_a"]
& Ta \arrow[d, "f"] \\
Ta \arrow[r, "f"]
& a
\end{tikzcd}
\end{figure}
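Read back in Haskell (illustrative only, assuming an algebra is simply a function alg :: t a -> a for a monad t), the square says that evaluating after flattening agrees with evaluating after mapping the evaluator:

  import Control.Monad (join)

  -- The commuting square for an algebra `alg`, stated as a testable property:
  --   alg . join  ==  alg . fmap alg
  squareCommutes :: (Monad t, Eq a) => (t a -> a) -> t (t a) -> Bool
  squareCommutes alg tta = alg (join tta) == alg (fmap alg tta)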

\noindent
@ -260,20 +260,20 @@ Composition of morphisms in the Kleisli category is defined in terms of
monadic composition of Kleisli arrows. For instance, let's compose
$g_{\cat{K}}$ after $f_{\cat{K}}$. In the Kleisli category we have:
\begin{gather*}
f_{\cat{K}} \Colon a \to b \\
g_{\cat{K}} \Colon b \to c
\end{gather*}
which, in the category $\cat{C}$, corresponds to:
\begin{gather*}
f \Colon a \to T\ b \\
g \Colon b \to T\ c
\end{gather*}
We define the composition:
\[h_{\cat{K}} = g_{\cat{K}} \circ f_{\cat{K}}\]
as a Kleisli arrow in $\cat{C}$
\begin{align*}
-h &\Colon a \to T\ c \\
-h &= \mu \circ (T\ g) \circ f
+h & \Colon a \to T\ c \\
+h & = \mu \circ (T\ g) \circ f
\end{align*}
In Haskell we would write it as:
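The hunk ends just before the book's own snippet, so here is an illustrative stand-in (names are mine, not the book's): the formula mu . (T g) . f translates to join . fmap g . f, which coincides with the fish operator (<=<) from Control.Monad.

  import Control.Monad (join)

  -- Kleisli composition, h = mu . (T g) . f, with mu = join and T g = fmap g.
  composeK :: Monad t => (b -> t c) -> (a -> t b) -> (a -> t c)
  composeK g f = join . fmap g . f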
@ -377,8 +377,8 @@ for the functor $Store\ s$:
\src{snippet07}
This coalgebra can be also expressed as a pair of functions:
\begin{align*}
-set &\Colon a \to s \to a \\
-get &\Colon a \to s
+set & \Colon a \to s \to a \\
+get & \Colon a \to s
\end{align*}
(Think of $a$ as standing for ``all,'' and $s$ as a
``small'' part of it.) In terms of this pair, we have:
@ -433,14 +433,14 @@ the \code{Store} functor.
\section{Challenges}

\begin{enumerate}
\tightlist
\item
What is the action of the free functor
$F \Colon C \to C^T$ on morphisms? Hint: use the
naturality condition for monadic $\mu$. (A sketch follows this list.)
\item
Define the adjunction:
\[U^W \dashv F^W\]
\item
Prove that the above adjunction reproduces the original comonad.
\end{enumerate}
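For the first challenge, one hedged Haskell reading (illustrative only, not the book's answer): the free functor sends $a$ to the free algebra with carrier $T a$ and structure map $\mu_a$, and a morphism f to T f, i.e. fmap f; naturality of mu (join) is what makes T f an algebra homomorphism.

  import Control.Monad (join)

  -- Action of the free functor on a morphism f :: a -> b.
  freeMap :: Monad t => (a -> b) -> (t a -> t b)
  freeMap = fmap

  -- Homomorphism square for freeMap, which holds by naturality of join:
  --   join . fmap (fmap f) == fmap f . join
  freeMapIsHom :: (Monad t, Eq (t b)) => (a -> b) -> t (t a) -> Bool
  freeMapIsHom f tta = join (fmap (fmap f) tta) == fmap f (join tta)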
@ -4,5 +4,5 @@
\chapter*{A note from the editor}
\addcontentsline{toc}{chapter}{A note from the editor}
\input{content/\OPTCustomLanguage/editor-note}
}
\fi
@ -13,18 +13,7 @@
\usetikzlibrary{calc,positioning, shadings}
\usepackage[T1]{fontenc}
\usepackage{fontspec}
+\usepackage{Alegreya}
-\setmainfont[
-Path=fonts/,
-Extension=.otf,
-UprightFont=*-Regular,
-ItalicFont=*-Italic,
-BoldFont=*-Bold,
-UprightFeatures={SmallCapsFont=*SC-Regular},
-ItalicFeatures={SmallCapsFont=*SC-Italic},
-BoldFeatures={SmallCapsFont=*SC-Bold},
-BoldItalicFeatures={SmallCapsFont=*SC-BoldItalic},
-]{AlegreyaSans}

\newcommand{\olpath}{../}
\newcommand{\whitebg}[1]{%
@ -51,9 +40,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -113,5 +102,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
@ -13,18 +13,7 @@
\usetikzlibrary{calc,positioning, shadings}
\usepackage[T1]{fontenc}
\usepackage{fontspec}
+\usepackage{Alegreya}
-\setmainfont[
-Path=fonts/,
-Extension=.otf,
-UprightFont=*-Regular,
-ItalicFont=*-Italic,
-BoldFont=*-Bold,
-UprightFeatures={SmallCapsFont=*SC-Regular},
-ItalicFeatures={SmallCapsFont=*SC-Italic},
-BoldFeatures={SmallCapsFont=*SC-Bold},
-BoldItalicFeatures={SmallCapsFont=*SC-BoldItalic},
-]{AlegreyaSans}

\newcommand{\olpath}{../}
\newcommand{\whitebg}[1]{%
@ -51,9 +40,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -113,5 +102,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
@ -13,18 +13,7 @@
\usetikzlibrary{calc,positioning, shadings}
\usepackage[T1]{fontenc}
\usepackage{fontspec}
+\usepackage{Alegreya}
-\setmainfont[
-Path=fonts/,
-Extension=.otf,
-UprightFont=*-Regular,
-ItalicFont=*-Italic,
-BoldFont=*-Bold,
-UprightFeatures={SmallCapsFont=*SC-Regular},
-ItalicFeatures={SmallCapsFont=*SC-Italic},
-BoldFeatures={SmallCapsFont=*SC-Bold},
-BoldItalicFeatures={SmallCapsFont=*SC-BoldItalic},
-]{AlegreyaSans}

\newcommand{\olpath}{../}
\newcommand{\whitebg}[1]{%
@ -51,9 +40,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -113,5 +102,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
@ -46,9 +46,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -107,5 +107,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
@ -14,26 +14,7 @@
\usetikzlibrary{calc,positioning, shadings}
\usepackage[T1]{fontenc}
\usepackage{fontspec}
+\usepackage{Alegreya}
-% \setmainfont{AlegreyaSans-Regular}[
-% BoldFont={AlegreyaSans-Bold},
-% ItalicFont={AlegreyaSans-Italic},
-% UprightFeatures={SmallCapsFont=AlegreyaSansSC-Regular},
-% ItalicFeatures={SmallCapsFont=AlegreyaSansSC-Italic},
-% BoldFeatures={SmallCapsFont=AlegreyaSansSC-Bold},
-% BoldItalicFeatures={SmallCapsFont=AlegreyaSansSC-BoldItalic},
-% ]
-\setmainfont[
-Path=fonts/,
-Extension=.otf,
-UprightFont=*-Regular,
-ItalicFont=*-Italic,
-BoldFont=*-Bold,
-UprightFeatures={SmallCapsFont=*SC-Regular},
-ItalicFeatures={SmallCapsFont=*SC-Italic},
-BoldFeatures={SmallCapsFont=*SC-Bold},
-BoldItalicFeatures={SmallCapsFont=*SC-BoldItalic},
-]{AlegreyaSans}

\newcommand{\olpath}{../}
\newcommand{\whitebg}[1]{%
@ -60,9 +41,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -123,5 +104,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
@ -14,26 +14,7 @@
\usetikzlibrary{calc,positioning, shadings}
\usepackage[T1]{fontenc}
\usepackage{fontspec}
+\usepackage{Alegreya}
-% \setmainfont{AlegreyaSans-Regular}[
-% BoldFont={AlegreyaSans-Bold},
-% ItalicFont={AlegreyaSans-Italic},
-% UprightFeatures={SmallCapsFont=AlegreyaSansSC-Regular},
-% ItalicFeatures={SmallCapsFont=AlegreyaSansSC-Italic},
-% BoldFeatures={SmallCapsFont=AlegreyaSansSC-Bold},
-% BoldItalicFeatures={SmallCapsFont=AlegreyaSansSC-BoldItalic},
-% ]
-\setmainfont[
-Path=fonts/,
-Extension=.otf,
-UprightFont=*-Regular,
-ItalicFont=*-Italic,
-BoldFont=*-Bold,
-UprightFeatures={SmallCapsFont=*SC-Regular},
-ItalicFeatures={SmallCapsFont=*SC-Italic},
-BoldFeatures={SmallCapsFont=*SC-Bold},
-BoldItalicFeatures={SmallCapsFont=*SC-BoldItalic},
-]{AlegreyaSans}

\newcommand{\olpath}{../}
\newcommand{\whitebg}[1]{%
@ -60,9 +41,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -123,5 +104,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
@ -14,18 +14,7 @@
\usetikzlibrary{calc,positioning, shadings}
\usepackage[T1]{fontenc}
\usepackage{fontspec}
+\usepackage{Alegreya}
-\setmainfont[
-Path=fonts/,
-Extension=.otf,
-UprightFont=*-Regular,
-ItalicFont=*-Italic,
-BoldFont=*-Bold,
-UprightFeatures={SmallCapsFont=*SC-Regular},
-ItalicFeatures={SmallCapsFont=*SC-Italic},
-BoldFeatures={SmallCapsFont=*SC-Bold},
-BoldItalicFeatures={SmallCapsFont=*SC-BoldItalic},
-]{AlegreyaSans}

\newcommand{\olpath}{../}
\newcommand{\whitebg}[1]{%
@ -52,9 +41,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -115,5 +104,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
@ -46,9 +46,9 @@
\definecolor{BackgroundColor}{HTML}{f3f6ed}
\definecolor{SpineBackColor}{HTML}{262626}

\begin{document}

\begin{bookcover}
\bookcovercomponent{color}{bg whole}{color=BackgroundColor}
\bookcovercomponent{color}{spine}{color=SpineBackColor}
\bookcovercomponent{normal}{front}{
@ -107,5 +107,5 @@
\end{center}
\end{minipage}
}
\end{bookcover}
\end{document}
Binary files not shown.
Some files were not shown because too many files have changed in this diff.